Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-13 01:50:46 +00:00)

Compare commits: develop ... erikj/oaut (3 commits)

Commits: 0ab46295ff, eb30b2501a, 16b4e60a78
@@ -7,4 +7,4 @@ if command -v yum &> /dev/null; then
 fi
 
 # Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal
+curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
.ci/scripts/auditwheel_wrapper.py (new executable file, 147 lines)
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2023 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+# Originally licensed under the Apache License, Version 2.0:
+# <http://www.apache.org/licenses/LICENSE-2.0>.
+#
+# [This file includes modifications made by New Vector Limited]
+#
+#
+
+# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
+# compatible wheel, if so rename the wheel before repairing it.
+
+import argparse
+import os
+import subprocess
+from typing import Optional
+from zipfile import ZipFile
+
+from packaging.tags import Tag
+from packaging.utils import parse_wheel_filename
+from packaging.version import Version
+
+
+def check_is_abi3_compatible(wheel_file: str) -> None:
+    """Check the contents of the built wheel for any `.so` files that are *not*
+    abi3 compatible.
+    """
+
+    with ZipFile(wheel_file, "r") as wheel:
+        for file in wheel.namelist():
+            if not file.endswith(".so"):
+                continue
+
+            if not file.endswith(".abi3.so"):
+                raise Exception(f"Found non-abi3 lib: {file}")
+
+
+def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
+    """Replaces the cpython wheel file with a ABI3 compatible wheel"""
+
+    if tag.abi == "abi3":
+        # Nothing to do.
+        return wheel_file
+
+    check_is_abi3_compatible(wheel_file)
+
+    # HACK: it seems that some older versions of pip will consider a wheel marked
+    # as macosx_11_0 as incompatible with Big Sur. I haven't done the full archaeology
+    # here; there are some clues in
+    #     https://github.com/pantsbuild/pants/pull/12857
+    #     https://github.com/pypa/pip/issues/9138
+    #     https://github.com/pypa/packaging/pull/319
+    # Empirically this seems to work, note that macOS 11 and 10.16 are the same,
+    # both versions are valid for backwards compatibility.
+    platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
+    abi3_tag = Tag(tag.interpreter, "abi3", platform)
+
+    dirname = os.path.dirname(wheel_file)
+    new_wheel_file = os.path.join(
+        dirname,
+        f"{name}-{version}-{abi3_tag}.whl",
+    )
+
+    os.rename(wheel_file, new_wheel_file)
+
+    print("Renamed wheel to", new_wheel_file)
+
+    return new_wheel_file
+
+
+def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
+    """Entry point"""
+
+    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
+    # normalizes the package name (i.e. it converts matrix_synapse ->
+    # matrix-synapse), which is not what we want.
+    _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
+    name = os.path.basename(wheel_file).split("-")[0]
+
+    if len(tags) != 1:
+        # We expect only a wheel file with only a single tag
+        raise Exception(f"Unexpectedly found multiple tags: {tags}")
+
+    tag = next(iter(tags))
+
+    if build:
+        # We don't use build tags in Synapse
+        raise Exception(f"Unexpected build tag: {build}")
+
+    # If the wheel is for cpython then convert it into an abi3 wheel.
+    if tag.interpreter.startswith("cp"):
+        wheel_file = cpython(wheel_file, name, version, tag)
+
+    # Finally, repair the wheel.
+    if archs is not None:
+        # If we are given archs then we are on macos and need to use
+        # `delocate-listdeps`.
+        subprocess.run(["delocate-listdeps", wheel_file], check=True)
+        subprocess.run(
+            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
+            check=True,
+        )
+    else:
+        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
+
+    parser.add_argument(
+        "--wheel-dir",
+        "-w",
+        metavar="WHEEL_DIR",
+        help="Directory to store delocated wheels",
+        required=True,
+    )
+
+    parser.add_argument(
+        "--require-archs",
+        metavar="archs",
+        default=None,
+    )
+
+    parser.add_argument(
+        "wheel_file",
+        metavar="WHEEL_FILE",
+    )
+
+    args = parser.parse_args()
+
+    wheel_file = args.wheel_file
+    wheel_dir = args.wheel_dir
+    archs = args.require_archs
+
+    main(wheel_file, wheel_dir, archs)
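Aside: the heart of the wrapper above is the abi3 rename in cpython(). Below is a minimal sketch of that logic using the same packaging helpers; the wheel filename is a made-up example, not taken from the diff.

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename

filename = "matrix_synapse-1.98.0-cp39-cp39-macosx_11_0_x86_64.whl"  # hypothetical
# Keep the un-normalised name: parse_wheel_filename would return "matrix-synapse".
name = filename.split("-")[0]
_, version, _, tags = parse_wheel_filename(filename)
tag = next(iter(tags))

# Work around old pip rejecting macosx_11_0, and claim abi3 instead of cp39.
platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
abi3_tag = Tag(tag.interpreter, "abi3", platform)
print(f"{name}-{version}-{abi3_tag}.whl")
# matrix_synapse-1.98.0-cp39-abi3-macosx_10_16_x86_64.whl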
.ci/calculate_jobs.py
@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest and newest Python
-# version that's supported. The oldest version ensures we don't accidentally
-# introduce syntax or code that's too new, and the newest ensures we don't use
-# code that's been dropped in the latest supported Python version.
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.9 right now)
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "sqlite",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "sqlite",
-        "extras": "all",
-    },
+    }
 ]
 
 if not IS_PR:
-    # Otherwise, check all supported Python versions.
-    #
-    # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
             "python-version": version,
             "database": "sqlite",
             "extras": "all",
         }
-        for version in ("3.11", "3.12", "3.13")
+        for version in ("3.10", "3.11", "3.12", "3.13")
    )
 
-# Only test postgres against the earliest and latest Python versions that we
-# support in order to save on CI time.
 trial_postgres_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "postgres",
-        "postgres-version": "14",
+        "postgres-version": "13",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "postgres",
-        "postgres-version": "17",
-        "extras": "all",
-    },
+    }
 ]
 
-# Ensure that Synapse passes unit tests even with no extra dependencies installed.
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.13",
+            "database": "postgres",
+            "postgres-version": "17",
+            "extras": "all",
+        }
+    )
 
 trial_no_extra_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "sqlite",
         "extras": "",
     }
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bookworm on PRs
+# For each type of test we only run on bullseye on PRs
 
 
 sytest_tests = [
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
         "postgres": "multi-postgres",
         "workers": "workers",
    },
    {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
@@ -136,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "bullseye",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "bullseye",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
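Aside: both hunks above only reshuffle these job matrices. For context, here is a simplified sketch of how calculate_jobs.py hands a matrix to GitHub Actions; set_output's body is inferred from standard $GITHUB_OUTPUT usage, not copied from the script.

import json
import os

trial_sqlite_tests = [
    {"python-version": "3.9", "database": "sqlite", "extras": "all"},
]

def set_output(key: str, value: str) -> None:
    # GitHub Actions picks up step outputs appended to the $GITHUB_OUTPUT file.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{key}={value}\n")

set_output("trial_test_matrix", json.dumps(trial_sqlite_tests))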
.ci/scripts/prepare_old_deps.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# this script is run by GitHub Actions in a plain `jammy` container; it
+# - installs the minimal system requirements, and poetry;
+# - patches the project definition file to refer to old versions only;
+# - creates a venv with these old versions using poetry; and finally
+# - invokes `trial` to run the tests with old deps.
+
+set -ex
+
+# Prevent virtualenv from auto-updating pip to an incompatible version
+export VIRTUALENV_NO_DOWNLOAD=1
+
+# TODO: in the future, we could use an implementation of
+# https://github.com/python-poetry/poetry/issues/3527
+# https://github.com/pypa/pip/issues/8085
+# to select the lowest possible versions, rather than resorting to this sed script.
+
+# Patch the project definitions in-place:
+# - Replace all lower and tilde bounds with exact bounds
+# - Replace all caret bounds---but not the one that defines the supported Python version!
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
+# - Use pyopenssl 17.0, which is the oldest version that works with
+#   a `cryptography` compiled against OpenSSL 1.1.
+# - Omit systemd: we're not logging to journal here.
+
+sed -i \
+    -e "s/[~>]=/==/g" \
+    -e '/^python = "^/!s/\^/==/g' \
+    -e "/psycopg2/d" \
+    -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+    -e '/systemd/d' \
+    pyproject.toml
+
+echo "::group::Patched pyproject.toml"
+cat pyproject.toml
+echo "::endgroup::"
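Aside: a rough Python rendering of the sed rules above, applied to a toy pyproject.toml fragment (illustrative only; the real script edits the file in place with sed).

import re

fragment = '''python = "^3.7"
attrs = ">=19.2.0"
psycopg2 = "^2.8"
pyOpenSSL = "==16.0.0"'''

out = []
for line in fragment.splitlines():
    line = re.sub(r"[~>]=", "==", line)              # exact-pin lower/tilde bounds
    if not line.startswith('python = "^'):
        line = line.replace("^", "==")               # exact-pin carets, except python
    if "psycopg2" in line or "systemd" in line:      # drop postgres/systemd deps
        continue
    line = line.replace('pyOpenSSL = "==16.0.0"', 'pyOpenSSL = "==17.0.0"')
    out.append(line)
print("\n".join(out))
# python = "^3.7"
# attrs = "==19.2.0"
# pyOpenSSL = "==17.0.0"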
.ci/scripts/test_synapse_port_db.sh
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
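Aside: a hedged sketch of what the final comparison amounts to, using difflib in place of diff -u; like diff, it exits nonzero when the dumps differ.

import difflib
import sys
from pathlib import Path

unported = Path("unported.sql").read_text().splitlines(keepends=True)
ported = Path("ported.sql").read_text().splitlines(keepends=True)

diff = list(difflib.unified_diff(unported, ported, "unported.sql", "ported.sql"))
sys.stdout.writelines(diff)
sys.exit(1 if diff else 0)  # nonzero exit when the schemas differ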
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# 1) Resolve project ID.
-PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
-
-# 2) Find existing item (project card) for this issue.
-ITEM_ID=$(
-  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
-    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
-)
-
-# 3) If one doesn't exist, add this issue to the project.
-if [ -z "${ITEM_ID:-}" ]; then
-  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
-fi
-
-# 4) Get Status field id + the option id for TARGET_STATUS.
-FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
-STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
-STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
-OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
-
-if [ -z "${OPTION_ID:-}" ]; then
-  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
-fi
-
-# 5) Set Status (moves item to the matching column in the board view).
-gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
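Aside: the deleted script walks a small protocol (resolve project, find or add the item, then set its Status). Here is a hedged Python sketch of the same flow via the identical gh commands; the owner, project number, issue URL and status name are hypothetical placeholders.

import json
import subprocess

def gh_json(*args: str):
    out = subprocess.run(["gh", *args, "--format", "json"],
                         check=True, capture_output=True, text=True).stdout
    return json.loads(out)

OWNER, NUMBER = "element-hq", "1"                               # hypothetical
ISSUE_URL = "https://github.com/element-hq/synapse/issues/1"    # hypothetical
TARGET_STATUS = "In Progress"                                   # hypothetical

project_id = gh_json("project", "view", NUMBER, "--owner", OWNER)["id"]
items = gh_json("project", "item-list", NUMBER, "--owner", OWNER)["items"]
item = next((i for i in items if i.get("content", {}).get("url") == ISSUE_URL), None)
if item is None:
    item = gh_json("project", "item-add", NUMBER, "--owner", OWNER, "--url", ISSUE_URL)

fields = gh_json("project", "field-list", NUMBER, "--owner", OWNER)["fields"]
status = next(f for f in fields if f["name"] == "Status")
option = next(o for o in status["options"] if o["name"] == TARGET_STATUS)

subprocess.run(["gh", "project", "item-edit", "--id", item["id"],
                "--project-id", project_id, "--field-id", status["id"],
                "--single-select-option-id", option["id"]], check=True)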
.git-blame-ignore-revs
@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11
 
-# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
-fc244bb592aa481faf28214a2e2ce3bb4e95d990
-
-# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
-fcac7e0282b074d4bd3414d1c9c181e9701875d9
.github/PULL_REQUEST_TEMPLATE.md (vendored, 3 changes)
@@ -9,4 +9,5 @@
 - End with either a period (.) or an exclamation mark (!).
 - Start with a capital letter.
 - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
+  (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
.github/dependabot.yml (vendored, 69 changes)
@@ -1,92 +1,23 @@
 version: 2
-# As dependabot is currently only run on a weekly basis, we raise the
-# open-pull-requests-limit to 10 (from the default of 5) to better ensure we
-# don't continuously grow a backlog of updates.
 updates:
   - # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
     package-ecosystem: "pip"
     directory: "/"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # Group patch updates to packages together into a single PR, as they rarely
-    # if ever contain breaking changes that need to be reviewed separately.
-    #
-    # Less PRs means a streamlined review process.
-    #
-    # Python packages follow semantic versioning, and tend to only introduce
-    # breaking changes in major version bumps. Thus, we'll group minor and patch
-    # versions together.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    # Prevent pulling packages that were recently updated to help mitigate
-    # supply chain attacks. 14 days was taken from the recommendation at
-    # https://blog.yossarian.net/2025/11/21/We-should-all-be-using-dependency-cooldowns
-    # where the author noted that 9/10 attacks would have been mitigated by a
-    # two week cooldown.
-    #
-    # The cooldown only applies to general updates; security updates will still
-    # be pulled in as soon as possible.
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "docker"
     directory: "/docker"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # For container versions, breaking changes are also typically only introduced in major
-    # package bumps.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "github-actions"
     directory: "/"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # Similarly for GitHub Actions, breaking changes are typically only introduced in major
-    # package bumps.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "cargo"
     directory: "/"
-    open-pull-requests-limit: 10
     versioning-strategy: "lockfile-only"
     schedule:
       interval: "weekly"
-    # The Rust ecosystem is special in that breaking changes are often introduced
-    # in minor version bumps, as packages typically stay pre-1.0 for a long time.
-    # Thus we specifically keep minor version bumps separate in their own PRs.
-    groups:
-      patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "patch"
-    cooldown:
-      default-days: 14
.github/workflows/docker.yml (vendored, 148 changes)
@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [master, main, develop]
+    branches: [ master, main, develop ]
   workflow_dispatch:
 
 permissions:
@@ -14,24 +14,26 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token
 jobs:
   build:
-    name: Build and push image for ${{ matrix.platform }}
-    runs-on: ${{ matrix.runs_on }}
-    strategy:
-      matrix:
-        include:
-          - platform: linux/amd64
-            runs_on: ubuntu-24.04
-            suffix: linux-amd64
-          - platform: linux/arm64
-            runs_on: ubuntu-24.04-arm
-            suffix: linux-arm64
+    runs-on: ubuntu-22.04
     steps:
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        with:
+          platforms: arm64
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+
+      - name: Inspect builder
+        run: docker buildx inspect
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a # v3.8.1
 
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,91 +43,25 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push by digest
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
-          file: "docker/Dockerfile"
-          platforms: ${{ matrix.platform }}
-          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
-
-      - name: Export digest
-        run: |
-          mkdir -p ${{ runner.temp }}/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-      - name: Upload digest
-        uses: actions/upload-artifact@v5
-        with:
-          name: digests-${{ matrix.suffix }}
-          path: ${{ runner.temp }}/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  merge:
-    name: Push merged images to ${{ matrix.repository }}
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        repository:
-          - docker.io/matrixdotorg/synapse
-          - ghcr.io/element-hq/synapse
-
-    needs:
-      - build
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'docker.io') }}
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
-
       - name: Calculate docker image tag
-        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
+        id: set-tag
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         with:
-          images: ${{ matrix.repository }}
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/element-hq/synapse
           flavor: |
             latest=false
           tags: |
@@ -133,23 +69,31 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
-            type=sha
 
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        env:
-          REPOSITORY: ${{ matrix.repository }}
-        run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$REPOSITORY@sha256:%s " *)
+      - name: Build and push all platforms
+        id: build-and-push
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        with:
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: "${{ steps.set-tag.outputs.tags }}"
+          file: "docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64
+
+          # arm64 builds OOM without the git fetch setting. c.f.
+          # https://github.com/rust-lang/cargo/issues/10583
+          build-args: |
+            CARGO_NET_GIT_FETCH_WITH_CLI=true
 
-      - name: Sign each manifest
+      - name: Sign the images with GitHub OIDC Token
         env:
-          REPOSITORY: ${{ matrix.repository }}
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.set-tag.outputs.tags }}
         run: |
-          DIGESTS=""
-          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
-            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
-            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
           done
-          cosign sign --yes $DIGESTS
+          cosign sign --yes ${images}
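Aside: a hedged Python stand-in for the removed "Sign each manifest" step, resolving each pushed tag to its manifest digest before signing; the repository and tag list are hypothetical placeholders.

import json
import subprocess

REPOSITORY = "docker.io/matrixdotorg/synapse"   # hypothetical target repo
tags = [f"{REPOSITORY}:latest"]                 # normally from the metadata action

digests = []
for tag in tags:
    # Ask buildx for the manifest of this tag and pull out its digest.
    manifest = subprocess.run(
        ["docker", "buildx", "imagetools", "inspect", tag,
         "--format", "{{json .Manifest}}"],
        check=True, capture_output=True, text=True,
    ).stdout
    digests.append(f"{REPOSITORY}@{json.loads(manifest)['digest']}")

# Sign by digest, not by tag, so the signature stays valid if tags move.
subprocess.run(["cosign", "sign", "--yes", *digests], check=True)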
.github/workflows/docs-pr-netlify.yaml (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
+name: Deploy documentation PR preview
+
+on:
+  workflow_run:
+    workflows: [ "Prepare documentation PR preview" ]
+    types:
+      - completed
+
+jobs:
+  netlify:
+    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
+    runs-on: ubuntu-latest
+    steps:
+      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
+      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
+      - name: 📥 Download artifact
+        uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
+        with:
+          workflow: docs-pr.yaml
+          run_id: ${{ github.event.workflow_run.id }}
+          name: book
+          path: book
+
+      - name: 📤 Deploy to Netlify
+        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
+        with:
+          path: book
+          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
+          branch: ${{ github.event.workflow_run.head_branch }}
+          revision: ${{ github.event.workflow_run.head_sha }}
+          token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+          site_id: ${{ secrets.NETLIFY_SITE_ID }}
+          desc: Documentation preview
+          deployment_env: PR Documentation Preview
.github/workflows/docs-pr.yaml (vendored, 8 changes)
@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
 
@@ -39,7 +39,7 @@ jobs:
         cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: book
           path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
.github/workflows/docs.yaml (vendored, 16 changes)
@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
 
@@ -78,18 +78,6 @@ jobs:
           mdbook build
           cp book/welcome_and_overview.html book/index.html
 
-      - name: Prepare and publish schema files
-        run: |
-          sudo apt-get update && sudo apt-get install -y yq
-          mkdir -p book/schema
-          # Remove developer notice before publishing.
-          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
-          # Copy schema files that are independent from current Synapse version.
-          cp -r -t book/schema schema/v*/
-          # Convert config schema from YAML source file to JSON.
-          yq < schema/synapse-config.schema.yaml \
-            > book/schema/synapse-config.schema.json
-
       # Deploy to the target directory.
       - name: Deploy to gh pages
         uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
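Aside: the removed "Prepare and publish schema files" step converts the YAML config schema to JSON with yq. A hedged sketch of the same conversion, with PyYAML standing in for yq:

import json
import yaml  # PyYAML

# Load the YAML source schema and re-serialise it as JSON for publishing.
with open("schema/synapse-config.schema.yaml") as f:
    schema = yaml.safe_load(f)

with open("book/schema/synapse-config.schema.json", "w") as f:
    json.dump(schema, f, indent=2)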
.github/workflows/fix_lint.yaml (vendored, 17 changes)
@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:
 
-env:
-  # We use nightly so that `fmt` correctly groups together imports, and
-  # clippy correctly fixes up the benchmarks.
-  RUST_VERSION: nightly-2025-06-24
-
 jobs:
   fixup:
     name: Fix up
@@ -18,14 +13,16 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
         with:
-          toolchain: ${{ env.RUST_VERSION }}
+          # We use nightly so that `fmt` correctly groups together imports, and
+          # clippy correctly fixes up the benchmarks.
+          toolchain: nightly-2022-12-01
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -47,6 +44,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      - uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5.1.0
         with:
           commit_message: "Attempt to fix linting"
.github/workflows/latest_deps.yml (vendored, 43 changes)
@@ -21,9 +21,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,12 +39,10 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
@@ -60,7 +55,7 @@ jobs:
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --without dev
+      - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +72,11 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
           -e POSTGRES_PASSWORD=postgres \
           -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
           postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -139,9 +132,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bookworm
+          - sytest-tag: bullseye
 
-          - sytest-tag: bookworm
+          - sytest-tag: bullseye
             postgres: postgres
             workers: workers
             redis: redis
@@ -152,13 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -202,14 +193,14 @@ jobs:
 
     steps:
       - name: Check out synapse codebase
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/poetry_lockfile.yaml (vendored, 4 changes)
@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: '3.x'
       - run: pip install tomli
.github/workflows/push_complement_image.yml (vendored, 10 changes)
@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         if: github.event_name == 'push'
        with:
          ref: master
       - name: Login to registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
.github/workflows/release-artifacts.yml (vendored, 99 changes)
@@ -5,7 +5,7 @@ name: Build release artifacts
 on:
   # we build on PRs and develop to (hopefully) get early warning
   # of things breaking (but only build one set of debs). PRs skip
-  # building wheels on ARM.
+  # building wheels on macOS & ARM.
   pull_request:
   push:
     branches: ["develop", "release-*"]
@@ -27,10 +27,10 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
-          python-version: "3.x"
+          python-version: '3.x'
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -55,18 +55,18 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           path: src
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
         with:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,9 +74,9 @@ jobs:
           ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
-          python-version: "3.x"
+          python-version: '3.x'
 
       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -101,60 +101,73 @@ jobs:
           echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
 
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
           path: debs/*
 
   build-wheels:
-    name: Build wheels on ${{ matrix.os }}
+    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os:
-          - ubuntu-24.04
-          - ubuntu-24.04-arm
+        os: [ubuntu-22.04, macos-13]
+        arch: [x86_64, aarch64]
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
           - ${{ startsWith(github.ref, 'refs/pull/') }}
 
         exclude:
+          # Don't build macos wheels on PR CI.
+          - is_pr: true
+            os: "macos-13"
+          # Don't build aarch64 wheels on mac.
+          - os: "macos-13"
+            arch: aarch64
           # Don't build aarch64 wheels on PR CI.
           - is_pr: true
-            os: "ubuntu-24.04-arm"
+            arch: aarch64
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           # setup-python@v4 doesn't impose a default python version. Need to use 3.x
           # here, because `python` on osx points to Python 2.7.
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.2.1
+        run: python -m pip install cibuildwheel==2.23.0
+
+      - name: Set up QEMU to emulate aarch64
+        if: matrix.arch == 'aarch64'
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        with:
+          platforms: arm64
+
+      - name: Build aarch64 wheels
+        if: matrix.arch == 'aarch64'
+        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
 
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
 
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
-          # The platforms that we build for are determined by the
-          # `tool.cibuildwheel.skip` option in `pyproject.toml`.
-          #
-          # We skip testing wheels for the following platforms in CI:
-          #
-          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
-          # musl: (TODO: investigate).
-          CIBW_TEST_SKIP: pp3*-* *musl*
+          # Skip testing for platforms which various libraries don't have wheels
+          # for, and so need extra build deps.
+          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
+          CARGO_NET_GIT_FETCH_WITH_CLI: true
+          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: Wheel-${{ matrix.os }}
+          name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
          path: ./wheelhouse/*.whl
 
   build-sdist:
@@ -163,21 +176,22 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
-          python-version: "3.10"
+          python-version: '3.10'
 
       - run: pip install build
 
       - name: Build sdist
         run: python -m build --sdist
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: Sdist
           path: dist/*.tar.gz
 
 
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
@@ -189,7 +203,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
|
||||||
# that.
|
# that.
|
||||||
@@ -198,11 +212,16 @@ jobs:
|
|||||||
mv debs*/* debs/
|
mv debs*/* debs/
|
||||||
tar -cvJf debs.tar.xz debs
|
tar -cvJf debs.tar.xz debs
|
||||||
- name: Attach to release
|
- name: Attach to release
|
||||||
|
# Pinned to work around https://github.com/softprops/action-gh-release/issues/445
|
||||||
|
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
with:
|
||||||
gh release upload "${{ github.ref_name }}" \
|
files: |
|
||||||
Sdist/* \
|
Sdist/*
|
||||||
Wheel*/* \
|
Wheel*/*
|
||||||
debs.tar.xz \
|
debs.tar.xz
|
||||||
--repo ${{ github.repository }}
|
# if it's not already published, keep the release as a draft.
|
||||||
|
draft: true
|
||||||
|
# mark it as a prerelease if the tag contains 'rc'.
|
||||||
|
prerelease: ${{ contains(github.ref, 'rc') }}
|
||||||
|
|||||||
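For context on what those `CIBW_*` variables do: cibuildwheel reads them to choose which wheels to build and which test phases to skip, with the rest of the platform selection coming from `tool.cibuildwheel` settings in `pyproject.toml` (per the develop-side comment). A rough, non-authoritative local equivalent of the PR fast path above:

    # Install the version this branch pins.
    pip install cibuildwheel==2.23.0

    # Build a single CPython 3.9 manylinux wheel, as the PR-only step does,
    # and skip testing on platforms whose dependencies lack wheels.
    export CIBW_BUILD="cp39-manylinux_x86_64"
    export CIBW_TEST_SKIP="pp3*-* *i686* *musl*"
    python -m cibuildwheel --output-dir wheelhouse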
57
.github/workflows/schema.yaml
vendored
@@ -1,57 +0,0 @@
-name: Schema
-
-on:
-  pull_request:
-    paths:
-      - schema/**
-      - docs/usage/configuration/config_documentation.md
-  push:
-    branches: ["develop", "release-*"]
-  workflow_dispatch:
-
-jobs:
-  validate-schema:
-    name: Ensure Synapse config schema is valid
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
-        with:
-          python-version: "3.x"
-      - name: Install check-jsonschema
-        run: pip install check-jsonschema==0.33.0
-
-      - name: Validate meta schema
-        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
-      - name: Validate schema
-        run: |-
-          # Please bump on introduction of a new meta schema.
-          LATEST_META_SCHEMA_VERSION=v1
-          check-jsonschema \
-            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
-            schema/synapse-config.schema.yaml
-      - name: Validate default config
-        # Populates the empty instance with default values and checks against the schema.
-        run: |-
-          echo "{}" | check-jsonschema \
-            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -
-
-  check-doc-generation:
-    name: Ensure generated documentation is up-to-date
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
-        with:
-          python-version: "3.x"
-      - name: Install PyYAML
-        run: pip install PyYAML==6.0.2
-
-      - name: Regenerate config documentation
-        run: |
-          scripts-dev/gen_config_documentation.py \
-            schema/synapse-config.schema.yaml \
-            > docs/usage/configuration/config_documentation.md
-      - name: Error in case of any differences
-        # Errors if there are now any modified files (untracked files are ignored).
-        run: 'git diff --exit-code'
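The schema checks this workflow runs (it exists only on the develop side; the branch predates it) can be reproduced locally with the same pinned tool; a minimal sketch, assuming a develop checkout with the `schema/` directory present:

    pip install check-jsonschema==0.33.0

    # Validate the meta schema itself, then the config schema against it.
    check-jsonschema --check-metaschema schema/v*/meta.schema.json
    check-jsonschema \
        --schemafile="schema/v1/meta.schema.json" \
        schema/synapse-config.schema.yaml

    # An empty config, filled with defaults, must also satisfy the schema.
    echo "{}" | check-jsonschema \
        --fill-defaults --schemafile=schema/synapse-config.schema.yaml -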
219
.github/workflows/tests.yml
vendored
@@ -11,9 +11,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
@@ -86,12 +83,10 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"
@@ -106,18 +101,18 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
-      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
+      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
       - run: scripts-dev/check_schema_delta.py --force-colors

   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -129,7 +124,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,13 +146,11 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -174,7 +167,7 @@ jobs:
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
         with:
           path: |
             .mypy_cache
@@ -187,20 +180,19 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh

   lint-newsfile:
-    # Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
-    if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
+    if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
       - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -208,20 +200,37 @@ jobs:
         env:
           PULL_REQUEST_NUMBER: ${{ github.event.number }}

+  lint-pydantic:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        with:
+          poetry-version: "2.1.1"
+          extras: "all"
+      - run: poetry run scripts-dev/check_pydantic_models.py
+
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
         with:
           components: clippy
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - run: cargo clippy -- -D warnings

@@ -233,70 +242,32 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
         with:
-          toolchain: nightly-2025-04-23
+          toolchain: nightly-2022-12-01
           components: clippy
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - run: cargo clippy --all-features -- -D warnings

-  lint-rust:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust == 'true' }}
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          # Install like a normal project from source with all optional dependencies
-          extras: all
-          install-project: "true"
-          poetry-version: "2.1.1"
-
-      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
-        # The `::error::` syntax is using GitHub Actions' error annotations, see
-        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
-        run: |
-          if git diff --quiet Cargo.lock; then
-            echo "Cargo.lock is up to date"
-          else
-            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
-            git diff --exit-code Cargo.lock
-            exit 1
-          fi
-
-  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
-  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
         with:
-          # We use nightly so that we can use some unstable options that we use in
-          # `.rustfmt.toml`.
-          toolchain: nightly-2025-04-23
+          # We use nightly so that it correctly groups together imports
+          toolchain: nightly-2022-12-01
           components: rustfmt
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - run: cargo fmt --check

@@ -307,8 +278,8 @@ jobs:
     needs: changes
     if: ${{ needs.changes.outputs.linting_readme == 'true' }}
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
       - run: "pip install rstcheck"
@@ -322,12 +293,12 @@ jobs:
       - lint-mypy
       - lint-crlf
       - lint-newsfile
+      - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
-      - lint-rust
       - lint-rustfmt
       - lint-readme
     runs-on: ubuntu-latest
@@ -343,9 +314,9 @@ jobs:
           lint
           lint-mypy
           lint-newsfile
+          lint-pydantic
           lint-clippy
           lint-clippy-nightly
-          lint-rust
           lint-rustfmt
           lint-readme

@@ -355,8 +326,8 @@ jobs:
     needs: linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
           python-version: "3.x"
       - id: get-matrix
@@ -376,7 +347,7 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
@@ -391,10 +362,8 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -432,13 +401,11 @@ jobs:
       - changes
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -447,17 +414,19 @@ jobs:
           sudo apt-get -qq install build-essential libffi-dev python3-dev \
             libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
         with:
-          python-version: '3.10'
+          python-version: '3.9'

       - name: Prepare old deps
-        # Note: we install using `uv` here, not poetry or pip to allow us to test with the
-        # minimum version of all dependencies, both those explicitly specified and those
-        # implicitly brought in by the explicit dependencies.
-        run: |
-          pip install uv
-          uv pip install --system --resolution=lowest .[all,test]
+        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
+        run: .ci/scripts/prepare_old_deps.sh
+
+      # Note: we install using `pip` here, not poetry. `poetry install` ignores the
+      # build-system section (https://github.com/python-poetry/poetry/issues/6154), but
+      # we explicitly want to test that you can `pip install` using the oldest version
+      # of poetry-core and setuptools-rust.
+      - run: pip install .[all,test]

       # We nuke the local copy, as we've installed synapse into the virtualenv
       # (rather than use an editable install, which we no longer support). If we
@@ -491,11 +460,11 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["pypy-3.10"]
+        python-version: ["pypy-3.9"]
         extras: ["all"]

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       # Install libs necessary for PyPy to build binary wheels for dependencies
       - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -545,15 +514,13 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Prepare test blacklist
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -562,7 +529,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -592,7 +559,7 @@ jobs:
           --health-retries 5

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -615,10 +582,10 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: "3.10"
-            postgres-version: "14"
+          - python-version: "3.9"
+            postgres-version: "13"

-          - python-version: "3.14"
+          - python-version: "3.13"
             postgres-version: "17"

     services:
@@ -636,7 +603,7 @@ jobs:
           --health-retries 5

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Add PostgreSQL apt repository
         # We need a version of pg_dump that can handle the version of
         # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -660,7 +627,7 @@ jobs:
           PGPASSWORD: postgres
           PGDATABASE: postgres
       - name: "Upload schema differences"
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
         with:
           name: Schema dumps
@@ -691,20 +658,18 @@ jobs:

     steps:
       - name: Checkout synapse codebase
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           path: synapse

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -727,13 +692,11 @@ jobs:
       - changes

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - run: cargo test

@@ -747,13 +710,13 @@ jobs:
       - changes

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - run: cargo bench --no-run
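Most of the jobs in this workflow reduce to the same tool invocations on both sides of the diff; a rough local equivalent of the core lint and test steps, assuming a checkout with Poetry and a Rust toolchain installed (CI adds caching and job matrices on top):

    # Install the project with all optional dependencies, as the CI jobs do.
    poetry install --extras all

    # Rust checks (lint-clippy / lint-rustfmt; the latter needs the pinned nightly).
    cargo clippy -- -D warnings
    cargo fmt --check

    # Python unit tests (the trial jobs).
    poetry run trial tests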
49
.github/workflows/triage_labelled.yml
vendored
@@ -6,26 +6,39 @@ on:

 jobs:
   move_needs_info:
+    name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
-    permissions:
-      contents: read
-    env:
-      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
-      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-      PROJECT_OWNER: matrix-org
-      # Backend issue triage board.
-      # https://github.com/orgs/matrix-org/projects/67/views/1
-      PROJECT_NUMBER: 67
-      ISSUE_URL: ${{ github.event.issue.html_url }}
-      # This field is case-sensitive.
-      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/add-to-project@280af8ae1f83a494cfad2cb10f02f6d13529caa9 # main (v1.0.2 + 10 commits)
+        id: add_project
         with:
-          # Only clone the script file we care about, instead of the whole repo.
-          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
-      - name: Ensure issue exists on the board, then set Status
-        run: .ci/scripts/triage_labelled_issue.sh
+          project-url: "https://github.com/orgs/matrix-org/projects/67"
+          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
+      - name: Set status
+        env:
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+        run: |
+          gh api graphql -f query='
+            mutation(
+              $project: ID!
+              $item: ID!
+              $fieldid: ID!
+              $columnid: String!
+            ) {
+              updateProjectV2ItemFieldValue(
+                input: {
+                  projectId: $project
+                  itemId: $item
+                  fieldId: $fieldid
+                  value: {
+                    singleSelectOptionId: $columnid
+                  }
+                }
+              ) {
+                projectV2Item {
+                  id
+                }
+              }
+            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
39
.github/workflows/twisted_trunk.yml
vendored
@@ -20,9 +20,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -43,13 +40,11 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -70,14 +65,12 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - run: sudo apt-get -qq install xmlsec1

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -108,22 +101,20 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using bookworm because that's what Debian oldstable is at the time of writing.
+      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bookworm
+      image: matrixdotorg/sytest-synapse:bullseye
       volumes:
         - ${{ github.workspace }}:/src

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -147,7 +138,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -175,14 +166,14 @@ jobs:

     steps:
       - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           path: synapse

       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -217,7 +208,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
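The "Patch dependencies" step referenced above (its body is outside these hunks) is what swaps the pinned Twisted for trunk before the canary run. A hypothetical local equivalent, for illustration only, since the actual workflow drives this through poetry inside the sytest container:

    # Illustrative only: force-install Twisted from trunk over the pinned version,
    # then run the test suite against it.
    poetry run pip install --upgrade "git+https://github.com/twisted/twisted.git"
    poetry run trial tests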
1
.gitignore
vendored
@@ -47,7 +47,6 @@ __pycache__/
 /.idea/
 /.ropeproject/
 /.vscode/
-/.zed/

 # build products
 !/.coveragerc
@@ -1,6 +1 @@
-# Unstable options are only available on a nightly toolchain and must be opted into
-unstable_features = true
-
-# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
-# https://github.com/rust-lang/rustfmt/issues/5083
 group_imports = "StdExternalCrate"
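Because `group_imports` (and, on the develop side, `unstable_features`) are nightly-only rustfmt options, format checking has to run on the dated nightly toolchain that the lint-rustfmt job pins; a minimal sketch assuming rustup:

    # Install the nightly the develop-side workflow uses, with rustfmt included.
    rustup toolchain install nightly-2025-04-23 --component rustfmt
    cargo +nightly-2025-04-23 fmt --check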
1274
CHANGES.md
File diff suppressed because it is too large

1420
Cargo.lock
generated
File diff suppressed because it is too large
251
README.rst
251
README.rst
@@ -7,48 +7,171 @@
|
|||||||
|
|
||||||
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
|
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
|
||||||
implementation, written and maintained by `Element <https://element.io>`_.
|
implementation, written and maintained by `Element <https://element.io>`_.
|
||||||
`Matrix <https://github.com/matrix-org>`__ is the open standard for secure and
|
`Matrix <https://github.com/matrix-org>`__ is the open standard for
|
||||||
interoperable real-time communications. You can directly run and manage the
|
secure and interoperable real time communications. You can directly run
|
||||||
source code in this repository, available under an AGPL license (or
|
and manage the source code in this repository, available under an AGPL
|
||||||
alternatively under a commercial license from Element).
|
license (or alternatively under a commercial license from Element).
|
||||||
|
There is no support provided by Element unless you have a
|
||||||
|
subscription from Element.
|
||||||
|
|
||||||
There is no support provided by Element unless you have a subscription from
|
Subscription
|
||||||
Element.
|
============
|
||||||
|
|
||||||
🚀 Getting started
|
For those that need an enterprise-ready solution, Element
|
||||||
==================
|
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
|
||||||
|
ESS builds on Synapse to offer a complete Matrix-based backend including the full
|
||||||
|
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
|
||||||
|
giving admins the power to easily manage an organization-wide
|
||||||
|
deployment. It includes advanced identity management, auditing,
|
||||||
|
moderation and data retention options as well as Long Term Support and
|
||||||
|
SLAs. ESS can be used to support any Matrix-based frontend client.
|
||||||
|
|
||||||
This component is developed and maintained by `Element <https://element.io>`_.
|
.. contents::
|
||||||
It gets shipped as part of the **Element Server Suite (ESS)** which provides the
|
|
||||||
official means of deployment.
|
|
||||||
|
|
||||||
ESS is a Matrix distribution from Element with focus on quality and ease of use.
|
🛠️ Installing and configuration
|
||||||
It ships a full Matrix stack tailored to the respective use case.
|
===============================
|
||||||
|
|
||||||
There are three editions of ESS:
|
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
||||||
|
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
||||||
|
<https://element-hq.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
||||||
|
|
||||||
- `ESS Community <https://github.com/element-hq/ess-helm>`_ - the free Matrix
|
.. _federation:
|
||||||
distribution from Element tailored to small-/mid-scale, non-commercial
|
|
||||||
community use cases
|
Synapse has a variety of `config options
|
||||||
- `ESS Pro <https://element.io/server-suite>`_ - the commercial Matrix
|
<https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
||||||
distribution from Element for professional use
|
which can be used to customise its behaviour after installation.
|
||||||
- `ESS TI-M <https://element.io/server-suite/ti-messenger>`_ - a special version
|
There are additional details on how to `configure Synapse for federation here
|
||||||
of ESS Pro focused on the requirements of TI-Messenger Pro and ePA as
|
<https://element-hq.github.io/synapse/latest/federate.html>`_.
|
||||||
specified by the German National Digital Health Agency Gematik
|
|
||||||
|
.. _reverse-proxy:
|
||||||
|
|
||||||
|
Using a reverse proxy with Synapse
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
It is recommended to put a reverse proxy such as
|
||||||
|
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||||
|
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||||
|
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
||||||
|
`HAProxy <https://www.haproxy.org/>`_ or
|
||||||
|
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
||||||
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
|
For information on configuring one, see `the reverse proxy docs
|
||||||
|
<https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_.
|
||||||
|
|
||||||
|
Upgrading an existing Synapse
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
The instructions for upgrading Synapse are in `the upgrade notes`_.
|
||||||
|
Please check these instructions as upgrading may require extra steps for some
|
||||||
|
versions of Synapse.
|
||||||
|
|
||||||
|
.. _the upgrade notes: https://element-hq.github.io/synapse/develop/upgrade.html
|
||||||
|
|
||||||
|
|
||||||
🛠️ Standalone installation and configuration
|
Platform dependencies
|
||||||
============================================
|
---------------------
|
||||||
|
|
||||||
The Synapse documentation describes `options for installing Synapse standalone
|
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||||
<https://element-hq.github.io/synapse/latest/setup/installation.html>`_. See
|
and aims to follow supported upstream versions. See the
|
||||||
below for more useful documentation links.
|
`deprecation policy <https://element-hq.github.io/synapse/latest/deprecation_policy.html>`_
|
||||||
|
for more details.
|
||||||
|
|
||||||
- `Synapse configuration options <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
|
||||||
- `Synapse configuration for federation <https://element-hq.github.io/synapse/latest/federate.html>`_
|
|
||||||
- `Using a reverse proxy with Synapse <https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_
|
|
||||||
- `Upgrading Synapse <https://element-hq.github.io/synapse/develop/upgrade.html>`_
|
|
||||||
|
|
||||||
|
Security note
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
||||||
|
repository endpoints`_.
|
||||||
|
|
||||||
|
.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
|
||||||
|
|
||||||
|
Whilst we make a reasonable effort to mitigate against XSS attacks (for
|
||||||
|
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
|
||||||
|
domain hosting other web applications. This especially applies to sharing
|
||||||
|
the domain with Matrix web clients and other sensitive applications like
|
||||||
|
webmail. See
|
||||||
|
https://developer.github.com/changes/2014-04-25-user-content-security for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
.. _CSP: https://github.com/matrix-org/synapse/pull/1021
|
||||||
|
|
||||||
|
Ideally, the homeserver should not simply be on a different subdomain, but on
|
||||||
|
a completely different `registered domain`_ (also known as top-level site or
|
||||||
|
eTLD+1). This is because `some attacks`_ are still possible as long as the two
|
||||||
|
applications share the same registered domain.
|
||||||
|
|
||||||
|
.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
|
||||||
|
|
||||||
|
.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
|
||||||
|
|
||||||
|
To illustrate this with an example, if your Element Web or other sensitive web
|
||||||
|
application is hosted on ``A.example1.com``, you should ideally host Synapse on
|
||||||
|
``example2.com``. Some amount of protection is offered by hosting on
|
||||||
|
``B.example1.com`` instead, so this is also acceptable in some scenarios.
|
||||||
|
However, you should *not* host your Synapse on ``A.example1.com``.
|
||||||
|
|
||||||
|
Note that all of the above refers exclusively to the domain used in Synapse's
|
||||||
|
``public_baseurl`` setting. In particular, it has no bearing on the domain
|
||||||
|
mentioned in MXIDs hosted on that server.
|
||||||
|
|
||||||
|
Following this advice ensures that even if an XSS is found in Synapse, the
|
||||||
|
impact to other applications will be minimal.
|
||||||
|
|
||||||
|
|
||||||
|
🧪 Testing a new installation
|
||||||
|
=============================
|
||||||
|
|
||||||
|
The easiest way to try out your new Synapse installation is by connecting to it
|
||||||
|
from a web client.
|
||||||
|
|
||||||
|
Unless you are running a test instance of Synapse on your local machine, in
|
||||||
|
general, you will need to enable TLS support before you can successfully
|
||||||
|
connect from a client: see
|
||||||
|
`TLS certificates <https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
||||||
|
|
||||||
|
An easy way to get started is to login or register via Element at
|
||||||
|
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||||
|
You will need to change the server you are logging into from ``matrix.org``
|
||||||
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
|
If you prefer to use another client, refer to our
|
||||||
|
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
|
||||||
|
|
||||||
|
If all goes well you should at least be able to log in, create a room, and
|
||||||
|
start sending messages.
|
||||||
|
|
||||||
|
.. _`client-user-reg`:
|
||||||
|
|
||||||
|
Registering a new user from a client
|
||||||
|
------------------------------------
|
||||||
|
|
||||||
|
By default, registration of new users via Matrix clients is disabled. To enable
|
||||||
|
it:
|
||||||
|
|
||||||
|
1. In the
|
||||||
|
`registration config section <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
||||||
|
set ``enable_registration: true`` in ``homeserver.yaml``.
|
||||||
|
2. Then **either**:
|
||||||
|
|
||||||
|
a. set up a `CAPTCHA <https://element-hq.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
||||||
|
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
||||||
|
|
||||||
|
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
||||||
|
the public internet. Without it, anyone can freely register accounts on your homeserver.
|
||||||
|
This can be exploited by attackers to create spambots targeting the rest of the Matrix
|
||||||
|
federation.
|
||||||
|

Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::

    @localpart:my.domain.name

(pronounced "at localpart on my dot domain dot name").

As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.

🎯 Troubleshooting and support
==============================

@@ -60,7 +183,7 @@ Enterprise quality support for Synapse including SLAs is available as part of an
`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.

If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
-and access the `Element product documentation <https://docs.element.io>`_.
+and access the `knowledge base <https://ems-docs.element.io>`_.

🤝 Community support
--------------------
@@ -79,6 +202,35 @@ issues for support requests, only for bug reports and feature requests.
.. |docs| replace:: ``docs``
.. _docs: docs

🪪 Identity Servers
===================

Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.

**They are not where accounts or credentials are stored - these live on home
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**

This process is very security-sensitive, as there is an obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
the role of managing trusted identity in the Matrix ecosystem is farmed out to
a cluster of known trusted ecosystem partners, who run 'Matrix Identity
Servers' such as `Sydent <https://github.com/matrix-org/sydent>`_, whose role
is purely to authenticate and track 3PID logins and publish end-user public
keys.

You can host your own copy of Sydent, but this will prevent you reaching other
users in the Matrix ecosystem via their email address, and prevent them finding
you. We therefore recommend that you use one of the centralised identity servers
at ``https://matrix.org`` or ``https://vector.im`` for now.

To reiterate: the Identity server will only be used if you choose to associate
an email address with your account, or send an invite to another user via their
email address.


🛠️ Development
==============

@@ -86,9 +238,9 @@ issues for support requests, only for bug reports and feature requests.
We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
-This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
+This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:

* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,

@@ -101,29 +253,16 @@ Alongside all that, join our developer community on Matrix:
Copyright and Licensing
=======================

-| Copyright 2014–2017 OpenMarket Ltd
-| Copyright 2017 Vector Creations Ltd
-| Copyright 2017–2025 New Vector Ltd
-| Copyright 2025 Element Creations Ltd
+Copyright 2014-2017 OpenMarket Ltd
+Copyright 2017 Vector Creations Ltd
+Copyright 2017-2025 New Vector Ltd

-This software is dual-licensed by Element Creations Ltd (Element). It can be
-used either:
+This software is dual-licensed by New Vector Ltd (Element). It can be used either:

-(1) for free under the terms of the GNU Affero General Public License (as
-published by the Free Software Foundation, either version 3 of the License,
-or (at your option) any later version); OR
+(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR

-(2) under the terms of a paid-for Element Commercial License agreement between
-you and Element (the terms of which may vary depending on what you and
-Element have agreed to).
+(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).

-Unless required by applicable law or agreed to in writing, software distributed
-under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the
-specific language governing permissions and limitations under the Licenses.
+Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.

-Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase
-an Element commercial license for this software.

.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success

@@ -2,13 +2,13 @@

 import itertools
 import os
-from typing import Any
+from typing import Any, Dict

 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension


-def build(setup_kwargs: dict[str, Any]) -> None:
+def build(setup_kwargs: Dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

@@ -19,20 +19,20 @@ def build(setup_kwargs: dict[str, Any]) -> None:
         # This flag is a no-op in the latest versions. Instead, we need to
         # specify this in the `bdist_wheel` config below.
         py_limited_api=True,
-        # We always build in release mode, as we can't distinguish
-        # between using `poetry` in development vs production.
+        # We force always building in release mode, as we can't tell the
+        # difference between using `poetry` in development vs production.
         debug=False,
     )
     setup_kwargs.setdefault("rust_extensions", []).append(extension)
     setup_kwargs["zip_safe"] = False

-    # We look up the minimum supported Python version with
-    # `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
+    # We lookup the minimum supported python version by looking at
+    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
     # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp310 for Python 3.10.
+    # e.g. cp39 for python 3.9.
     py_limited_api: str
     python_bounds = SpecifierSet(setup_kwargs["python_requires"])
-    for minor_version in itertools.count(start=10):
+    for minor_version in itertools.count(start=8):
         if f"3.{minor_version}.0" in python_bounds:
             py_limited_api = f"cp3{minor_version}"
             break
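Aside, not part of the diff: a minimal standalone sketch of the version probe
above, using the real `packaging` API. `SpecifierSet` membership accepts a
plain version string, so the loop stops at the first minor release inside the
declared bounds:

    import itertools

    from packaging.specifiers import SpecifierSet

    # Hypothetical bounds, mirroring a `python_requires` of ">=3.10.0,<4.0.0".
    python_bounds = SpecifierSet(">=3.10.0,<4.0.0")

    for minor_version in itertools.count(start=10):
        if f"3.{minor_version}.0" in python_bounds:
            # The first match is the minimum supported version: cp310 here.
            print(f"py_limited_api = cp3{minor_version}")
            break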
changelog.d/18133.misc (new file, 1 line)
@@ -0,0 +1 @@
+Disable statement timeout during room purge.

changelog.d/18232.feature (new file, 1 line)
@@ -0,0 +1 @@
+Add `passthrough_authorization_parameters` in OIDC configuration to allow passing parameters to the authorization grant URL.

changelog.d/18294.docker (new file, 1 line)
@@ -0,0 +1 @@
+Optimize the build of the complement-synapse image.

changelog.d/18334.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Fix `force_tracing_for_users` config when using delegated auth.

@@ -1 +0,0 @@
-Group together dependabot update PRs to reduce the review load.

@@ -1 +0,0 @@
-Fix `HomeServer.shutdown()` failing if the homeserver hasn't been set up yet.

@@ -1 +0,0 @@
-Fix sliding sync performance slowdown for long lived connections.

@@ -1 +0,0 @@
-Respond with useful error codes when `Content-Length` header(s) are invalid.

@@ -1 +0,0 @@
-Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`.

@@ -1 +0,0 @@
-Switch the build backend from `poetry-core` to `maturin`.

@@ -1 +0,0 @@
-Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10.

@@ -1 +0,0 @@
-Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden.

@@ -1 +0,0 @@
-Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks.

@@ -1 +0,0 @@
-Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes.

@@ -1 +0,0 @@
-Drop the broken netlify documentation workflow until a new one is implemented.

@@ -1 +0,0 @@
-Fix bug where `Duration` was logged incorrectly.

@@ -1 +0,0 @@
-Add an admin API for retrieving a paginated list of quarantined media.

@@ -1 +0,0 @@
-Document the importance of `public_baseurl` when configuring OpenID Connect authentication.

@@ -1 +0,0 @@
-Fix bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2.

@@ -1 +0,0 @@
-Don't include debug logs in `Clock` unless explicitly enabled.

@@ -1 +0,0 @@
-Use `uv` to test olddeps to ensure all transitive dependencies use minimum versions.

@@ -1 +0,0 @@
-Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that macOS support has been dropped.
@@ -33,6 +33,7 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
+from typing import Optional

 import urlparse
 from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -725,7 +726,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: dict | None = None,
+        query_params: Optional[dict] = None,
         alt_text=None,
     ):
         """Runs an HTTP request and pretty prints the output.

@@ -22,6 +22,7 @@
 import json
 import urllib
 from pprint import pformat
+from typing import Optional

 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -89,7 +90,7 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         return json.loads(body)

-    def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
+    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a PUT request"""
         headers_dict = headers_dict or {}
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )

-    def _create_get_request(self, url, headers_dict: dict | None = None):
+    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a GET request"""
         return self._create_request("GET", url, headers_dict=headers_dict or {})
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
         data=None,
         qparams=None,
         jsonreq=True,
-        headers: dict | None = None,
+        headers: Optional[dict] = None,
     ):
         headers = headers or {}
@@ -137,7 +138,7 @@ class TwistedHttpClient(HttpClient):

     @defer.inlineCallbacks
     def _create_request(
-        self, method, url, producer=None, headers_dict: dict | None = None
+        self, method, url, producer=None, headers_dict: Optional[dict] = None
     ):
         """Creates and sends a request to the given url"""
         headers_dict = headers_dict or {}
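An aside on the annotation change above (illustrative only, not part of the
diff): `Optional[dict]` and the PEP 604 form `dict | None` describe the same
type, but the `X | Y` spelling is evaluated when the function is defined and
therefore needs Python 3.10 or newer, so a script that must still import on
older interpreters falls back to `typing.Optional`:

    from typing import Optional


    def fetch(url: str, headers: Optional[dict] = None) -> dict:
        # On Python 3.10+ this could equivalently be: headers: dict | None = None
        headers = headers or {}
        return {"url": url, "headers": headers}


    print(fetch("https://example.com", {"Accept": "application/json"}))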
@@ -220,24 +220,29 @@
        "yBucketBound": "auto"
      },
      {
-       "datasource": {
-         "uid": "${DS_PROMETHEUS}",
-         "type": "prometheus"
-       },
        "aliasColors": {},
+       "bars": false,
        "dashLength": 10,
+       "dashes": false,
+       "datasource": {
+         "uid": "${DS_PROMETHEUS}"
+       },
+       "description": "",
        "fieldConfig": {
          "defaults": {
            "links": []
          },
          "overrides": []
        },
+       "fill": 0,
+       "fillGradient": 0,
        "gridPos": {
          "h": 9,
          "w": 12,
          "x": 12,
          "y": 1
        },
+       "hiddenSeries": false,
        "id": 152,
        "legend": {
          "avg": false,
@@ -250,81 +255,71 @@
          "values": false
        },
        "lines": true,
+       "linewidth": 0,
+       "links": [],
        "nullPointMode": "connected",
        "options": {
          "alertThreshold": true
        },
        "paceLength": 10,
-       "pluginVersion": "10.4.3",
+       "percentage": false,
+       "pluginVersion": "9.2.2",
        "pointradius": 5,
+       "points": false,
        "renderer": "flot",
        "seriesOverrides": [
          {
            "alias": "Avg",
            "fill": 0,
-           "linewidth": 3,
-           "$$hashKey": "object:48"
+           "linewidth": 3
          },
          {
            "alias": "99%",
            "color": "#C4162A",
-           "fillBelowTo": "90%",
-           "$$hashKey": "object:49"
+           "fillBelowTo": "90%"
          },
          {
            "alias": "90%",
            "color": "#FF7383",
-           "fillBelowTo": "75%",
-           "$$hashKey": "object:50"
+           "fillBelowTo": "75%"
          },
          {
            "alias": "75%",
            "color": "#FFEE52",
-           "fillBelowTo": "50%",
-           "$$hashKey": "object:51"
+           "fillBelowTo": "50%"
          },
          {
            "alias": "50%",
            "color": "#73BF69",
-           "fillBelowTo": "25%",
-           "$$hashKey": "object:52"
+           "fillBelowTo": "25%"
          },
          {
            "alias": "25%",
            "color": "#1F60C4",
-           "fillBelowTo": "5%",
-           "$$hashKey": "object:53"
+           "fillBelowTo": "5%"
          },
          {
            "alias": "5%",
-           "lines": false,
-           "$$hashKey": "object:54"
+           "lines": false
          },
          {
            "alias": "Average",
            "color": "rgb(255, 255, 255)",
            "lines": true,
-           "linewidth": 3,
-           "$$hashKey": "object:55"
+           "linewidth": 3
          },
          {
-           "alias": "Local events being persisted",
-           "color": "#96d98D",
-           "points": true,
-           "yaxis": 2,
-           "zindex": -3,
-           "$$hashKey": "object:56"
-         },
-         {
-           "$$hashKey": "object:329",
+           "alias": "Events",
            "color": "#B877D9",
+           "hideTooltip": true,
            "points": true,
            "yaxis": 2,
            "zindex": -3
          }
        ],
        "spaceLength": 10,
+       "stack": false,
+       "steppedLine": false,
        "targets": [
          {
            "datasource": {
@@ -389,20 +384,7 @@
          },
          "expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
          "legendFormat": "Average",
-         "refId": "H",
-         "editorMode": "code",
-         "range": true
-       },
-       {
-         "datasource": {
-           "uid": "${DS_PROMETHEUS}"
-         },
-         "expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
-         "hide": false,
-         "instant": false,
-         "legendFormat": "Local events being persisted",
-         "refId": "E",
-         "editorMode": "code"
+         "refId": "H"
        },
        {
          "datasource": {
@@ -411,9 +393,8 @@
          "expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
          "hide": false,
          "instant": false,
-         "legendFormat": "All events being persisted",
-         "refId": "I",
-         "editorMode": "code"
+         "legendFormat": "Events",
+         "refId": "E"
        }
      ],
      "thresholds": [
@@ -447,9 +428,7 @@
      "xaxis": {
        "mode": "time",
        "show": true,
-       "values": [],
-       "name": null,
-       "buckets": null
+       "values": []
      },
      "yaxes": [
        {
@@ -471,20 +450,7 @@
      ],
      "yaxis": {
        "align": false
-     },
-     "bars": false,
-     "dashes": false,
-     "description": "",
-     "fill": 0,
-     "fillGradient": 0,
-     "hiddenSeries": false,
-     "linewidth": 0,
-     "percentage": false,
-     "points": false,
-     "stack": false,
-     "steppedLine": false,
-     "timeFrom": null,
-     "timeShift": null
+     }
    },
    {
      "aliasColors": {},
@@ -2166,10 +2132,10 @@
        "datasource": {
          "uid": "${DS_PROMETHEUS}"
        },
-       "expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
+       "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
        "format": "time_series",
        "intervalFactor": 2,
-       "legendFormat": "{{origin_type}}",
+       "legendFormat": "{{type}}",
        "refId": "D"
      }
    ],
@@ -2254,7 +2220,7 @@
        "datasource": {
          "uid": "${DS_PROMETHEUS}"
        },
-       "expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
+       "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
        "format": "time_series",
        "instant": false,
        "intervalFactor": 2,
@@ -2294,6 +2260,99 @@
        "align": false
      }
    },
+   {
+     "aliasColors": {
+       "irc-freenode (local)": "#EAB839"
+     },
+     "bars": false,
+     "dashLength": 10,
+     "dashes": false,
+     "datasource": {
+       "uid": "${DS_PROMETHEUS}"
+     },
+     "decimals": 1,
+     "fill": 1,
+     "fillGradient": 0,
+     "gridPos": {
+       "h": 7,
+       "w": 12,
+       "x": 0,
+       "y": 44
+     },
+     "hiddenSeries": false,
+     "id": 44,
+     "legend": {
+       "alignAsTable": true,
+       "avg": false,
+       "current": false,
+       "hideEmpty": true,
+       "hideZero": true,
+       "max": false,
+       "min": false,
+       "show": true,
+       "total": false,
+       "values": false
+     },
+     "lines": true,
+     "linewidth": 1,
+     "links": [],
+     "nullPointMode": "null",
+     "options": {
+       "alertThreshold": true
+     },
+     "percentage": false,
+     "pluginVersion": "9.2.2",
+     "pointradius": 5,
+     "points": false,
+     "renderer": "flot",
+     "seriesOverrides": [],
+     "spaceLength": 10,
+     "stack": false,
+     "steppedLine": false,
+     "targets": [
+       {
+         "datasource": {
+           "uid": "${DS_PROMETHEUS}"
+         },
+         "expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
+         "format": "time_series",
+         "intervalFactor": 2,
+         "legendFormat": "{{origin_entity}} ({{origin_type}})",
+         "refId": "A",
+         "step": 20
+       }
+     ],
+     "thresholds": [],
+     "timeRegions": [],
+     "title": "Events/s by Origin",
+     "tooltip": {
+       "shared": false,
+       "sort": 2,
+       "value_type": "individual"
+     },
+     "type": "graph",
+     "xaxis": {
+       "mode": "time",
+       "show": true,
+       "values": []
+     },
+     "yaxes": [
+       {
+         "format": "hertz",
+         "logBase": 1,
+         "min": "0",
+         "show": true
+       },
+       {
+         "format": "short",
+         "logBase": 1,
+         "show": true
+       }
+     ],
+     "yaxis": {
+       "align": false
+     }
+   },
    {
      "aliasColors": {},
      "bars": false,
@@ -4303,7 +4362,7 @@
        "exemplar": false,
        "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
        "instant": false,
-       "legendFormat": "{{origin_server_name}} ",
+       "legendFormat": "{{server_name}} ",
        "range": true,
        "refId": "A"
      }
@@ -4425,7 +4484,7 @@
        "exemplar": false,
        "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
        "instant": false,
-       "legendFormat": "{{destination_server_name}}",
+       "legendFormat": "{{server_name}}",
        "range": true,
        "refId": "A"
      }
@@ -24,6 +24,7 @@ import datetime
 import html
 import json
 import urllib.request
+from typing import List

 import pydot

@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
     return f"{pdu_id}@{origin}"


-def make_graph(pdus: list[dict], filename_prefix: str) -> None:
+def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     """
     Generate a dot and SVG file for a graph of events in the room based on the
     topological ordering by querying a homeserver.
@@ -44,10 +45,6 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     colors = {"red", "green", "blue", "yellow", "purple"}

     for pdu in pdus:
-        # TODO: The "origin" field has since been removed from events generated
-        # by Synapse. We should consider removing it here as well, but since this
-        # is part of `contrib/`, it is left for the community to revise and ensure things
-        # still work correctly.
         origins.add(pdu.get("origin"))

     color_map = {color: color for color in colors if color in origins}
@@ -126,7 +123,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     graph.write_svg("%s.svg" % filename_prefix, prog="dot")


-def get_pdus(host: str, room: str) -> list[dict]:
+def get_pdus(host: str, room: str) -> List[dict]:
     transaction = json.loads(
         urllib.request.urlopen(
             f"http://{host}/_matrix/federation/v1/context/{room}/"
@@ -44,3 +44,31 @@ groups:
 ###
 ### End of 'Prometheus Console Only' rules block
 ###
+
+  ###
+  ### Grafana Only
+  ### The following rules are only needed if you use the Grafana dashboard
+  ### in contrib/grafana/synapse.json
+  ###
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
+    labels:
+      type: remote
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
+    labels:
+      type: local
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
+    labels:
+      type: bridges
+
+  - record: synapse_storage_events_persisted_by_event_type
+    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
+
+  - record: synapse_storage_events_persisted_by_origin
+    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
+
+  ###
+  ### End of 'Grafana Only' rules block
+  ###
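For orientation (an illustrative query, assuming the recording rules above are
loaded into Prometheus): the dashboard panels changed earlier in this diff can
then graph the pre-aggregated series directly, for example:

    rate(synapse_storage_events_persisted_by_source_type{instance="$instance"}[$bucket_size])

Each resulting sample carries the synthesized `type` label (`remote`, `local`,
or `bridges`), which is what the panel's `{{type}}` legend template displays.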
debian/changelog (302 lines, vendored)
@@ -1,305 +1,3 @@
-matrix-synapse-py3 (1.144.0) stable; urgency=medium
-  * New Synapse release 1.144.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Dec 2025 08:30:40 -0700
-
-matrix-synapse-py3 (1.144.0~rc1) stable; urgency=medium
-  * New Synapse release 1.144.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Dec 2025 09:11:19 -0700
-
-matrix-synapse-py3 (1.143.0) stable; urgency=medium
-  * New Synapse release 1.143.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Nov 2025 08:44:56 -0700
-
-matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
-  * New Synapse release 1.143.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 17:36:08 -0700
-
-matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
-  * New Synapse release 1.143.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 13:08:39 -0700
-
-matrix-synapse-py3 (1.142.1) stable; urgency=medium
-  * New Synapse release 1.142.1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 12:25:23 -0700
-
-matrix-synapse-py3 (1.142.0) stable; urgency=medium
-  * New Synapse release 1.142.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Nov 2025 09:45:51 +0000
-
-matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
-  * New Synapse release 1.142.0rc4.
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Nov 2025 10:54:42 +0000
-
-matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
-  * New Synapse release 1.142.0rc3.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 17:39:11 +0000
-
-matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
-  * New Synapse release 1.142.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 16:21:30 +0000
-
-matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
-  * New Synapse release 1.142.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 13:20:15 +0000
-
-matrix-synapse-py3 (1.141.0) stable; urgency=medium
-  * New Synapse release 1.141.0.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 29 Oct 2025 11:01:43 +0000
-
-matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
-  * New Synapse release 1.141.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Oct 2025 10:20:26 +0000
-
-matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
-  * New Synapse release 1.141.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100
-
-matrix-synapse-py3 (1.140.0) stable; urgency=medium
-  * New Synapse release 1.140.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100
-
-matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
-  * New Synapse release 1.140.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100
-
-matrix-synapse-py3 (1.139.2) stable; urgency=medium
-  * New Synapse release 1.139.2.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100
-
-matrix-synapse-py3 (1.139.1) stable; urgency=medium
-  * New Synapse release 1.139.1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
-
-matrix-synapse-py3 (1.138.4) stable; urgency=medium
-  * New Synapse release 1.138.4.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100
-
-matrix-synapse-py3 (1.138.3) stable; urgency=medium
-  * New Synapse release 1.138.3.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100
-
-matrix-synapse-py3 (1.139.0) stable; urgency=medium
-  * New Synapse release 1.139.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
-
-matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
-  * New Synapse release 1.139.0rc3.
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
-
-matrix-synapse-py3 (1.138.2) stable; urgency=medium
-  * The licensing specifier has been updated to add an optional
-    `LicenseRef-Element-Commercial` license. The code was already licensed in
-    this manner - the debian metadata was just not updated to reflect it.
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100
-
-matrix-synapse-py3 (1.138.1) stable; urgency=medium
-  * New Synapse release 1.138.1.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
-
-matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
-  * New Synapse release 1.139.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
-
-matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
-  * New Synapse release 1.139.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
-
-matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
-  * New synapse release 1.138.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Sep 2025 12:16:14 +0000
-
-matrix-synapse-py3 (1.137.0) stable; urgency=medium
-  * New Synapse release 1.137.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Aug 2025 10:23:41 +0100
-
-matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
-  * New Synapse release 1.137.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Aug 2025 10:55:22 +0100
-
-matrix-synapse-py3 (1.136.0) stable; urgency=medium
-  * New Synapse release 1.136.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Aug 2025 13:18:03 +0100
-
-matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
-  * New Synapse release 1.136.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 12:18:52 -0600
-
-matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
-  * New Synapse release 1.136.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Aug 2025 08:13:30 -0600
-
-matrix-synapse-py3 (1.135.2) stable; urgency=medium
-  * New Synapse release 1.135.2.
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:52:01 -0600
-
-matrix-synapse-py3 (1.135.1) stable; urgency=medium
-  * New Synapse release 1.135.1.
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:13:15 -0600
-
-matrix-synapse-py3 (1.135.0) stable; urgency=medium
-  * New Synapse release 1.135.0.
- -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Aug 2025 13:12:28 +0100
-
-matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
-  * New Synapse release 1.135.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jul 2025 12:19:14 +0100
-
-matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
-  * New Synapse release 1.135.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Jul 2025 12:08:37 +0100
-
-matrix-synapse-py3 (1.134.0) stable; urgency=medium
-  * New Synapse release 1.134.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jul 2025 14:22:50 +0100
-
-matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
-  * New Synapse release 1.134.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Jul 2025 11:27:13 +0100
-
-matrix-synapse-py3 (1.133.0) stable; urgency=medium
-  * New synapse release 1.133.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jul 2025 13:13:24 +0000
-
-matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
-  * New Synapse release 1.133.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Jun 2025 11:57:47 +0100
-
-matrix-synapse-py3 (1.132.0) stable; urgency=medium
-  * New Synapse release 1.132.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Jun 2025 13:16:20 +0100
-
-matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
-  * New Synapse release 1.132.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Jun 2025 11:15:18 +0100
-
-matrix-synapse-py3 (1.131.0) stable; urgency=medium
-  * New Synapse release 1.131.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Jun 2025 14:36:55 +0100
-
-matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
-  * New synapse release 1.131.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 28 May 2025 10:25:44 +0000
-
-matrix-synapse-py3 (1.130.0) stable; urgency=medium
-  * New Synapse release 1.130.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 May 2025 08:34:13 -0600
-
-matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
-  * New Synapse release 1.130.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 May 2025 10:44:04 +0100
-
-matrix-synapse-py3 (1.129.0) stable; urgency=medium
-  * New Synapse release 1.129.0.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 May 2025 12:22:11 +0100
-
-matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
-  * New synapse release 1.129.0rc2.
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Apr 2025 13:13:16 +0000
-
-matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
-  * New Synapse release 1.129.0rc1.
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Apr 2025 10:47:43 -0600
-
 matrix-synapse-py3 (1.128.0) stable; urgency=medium

   * New Synapse release 1.128.0.
debian/copyright (vendored)
@@ -8,7 +8,7 @@ License: Apache-2.0

 Files: *
 Copyright: 2023 New Vector Ltd
-License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
+License: AGPL-3.0-or-later

 Files: synapse/config/saml2.py
 Copyright: 2015, Ericsson
@@ -20,8 +20,8 @@
 # `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs
 # in `poetry export` in the past.

-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
+ARG DEBIAN_VERSION=bookworm
+ARG PYTHON_VERSION=3.12
 ARG POETRY_VERSION=2.1.1

 ###
@@ -142,10 +142,10 @@ RUN \
       libwebp7 \
       xmlsec1 \
       libjemalloc2 \
+      libicu \
     | grep '^\w' > /tmp/pkg-list && \
     for arch in arm64 amd64; do \
       mkdir -p /tmp/debs-${arch} && \
-      chown _apt:root /tmp/debs-${arch} && \
       cd /tmp/debs-${arch} && \
       apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
     done
@@ -171,20 +171,20 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}

 ARG TARGETARCH

-LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
-LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
 LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'

+# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
+# libraries to the right place, else the `COPY` won't work.
+# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
+# already present in the runtime image.
+COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
 COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
 COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
 COPY --from=runtime-deps /install-${TARGETARCH}/var /var
-# Copy the installed python packages from the builder stage.
-#
-# uv will generate a `.lock` file when installing packages, which we don't want
-# to copy to the final image.
-COPY --from=builder --exclude=.lock /install /usr/local
+COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
 COPY ./docker/conf /conf

@@ -1,67 +1,57 @@
-# syntax=docker/dockerfile:1-labs
+# syntax=docker/dockerfile:1

 ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
-ARG REDIS_VERSION=7.2
+ARG DEBIAN_VERSION=bookworm

-# first of all, we create a base image with dependencies which we can copy into the
+# first of all, we create a base image with an nginx which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing
 # each time.

-FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
+FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base

-ARG DEBIAN_VERSION
-ARG REDIS_VERSION
-
-# Tell apt to keep downloaded package files, as we're using cache mounts.
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-# The upstream redis-server deb has fewer dynamic libraries than Debian's package, which makes it easier to copy later on
-RUN \
-    curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
-    chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
-    echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
-
 RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && \
   DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-     nginx-light \
-     redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
-     # libicu is required by postgres, see `docker/complement/Dockerfile`
-     libicu76
+     redis-server nginx-light

-RUN \
-  # remove default page
-  rm /etc/nginx/sites-enabled/default && \
-  # have nginx log to stderr/out
-  ln -sf /dev/stdout /var/log/nginx/access.log && \
-  ln -sf /dev/stderr /var/log/nginx/error.log
-
-# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-RUN --mount=type=cache,target=/root/.cache/uv \
-    uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2
-
-RUN mkdir -p /uv/etc/supervisor/conf.d
+# Similarly, a base to copy the redis server from.
+#
+# The redis docker image has fewer dynamic libraries than the debian package,
+# which makes it much easier to copy (but we need to make sure we use an image
+# based on the same debian version as the synapse image, to make sure we get
+# the expected version of libc).
+FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base

 # now build the final image, based on the regular Synapse docker image
 FROM $FROM

-# Copy over dependencies
-COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
-COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
-COPY --from=deps_base /uv /
+# Install supervisord with uv pip instead of apt, to avoid installing a second
+# copy of python.
+# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
+# (mounted as --mount=type=cache) and the target directory.
+RUN \
+    --mount=type=bind,from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/uv \
+    --mount=type=cache,target=/root/.cache/uv \
+    /uv pip install --link-mode=copy --prefix="/usr/local" supervisor~=4.2
+
+RUN mkdir -p /etc/supervisor/conf.d
+
+# Copy over redis and nginx
+COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin

 COPY --from=deps_base /usr/sbin/nginx /usr/sbin
 COPY --from=deps_base /usr/share/nginx /usr/share/nginx
 COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
 COPY --from=deps_base /etc/nginx /etc/nginx
-COPY --from=deps_base /var/log/nginx /var/log/nginx
-# chown to allow non-root user to write to http-*-temp-path dirs
-COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
+RUN rm /etc/nginx/sites-enabled/default
+RUN mkdir /var/log/nginx /var/lib/nginx
+RUN chown www-data /var/lib/nginx

+# have nginx log to stderr/out
+RUN ln -sf /dev/stdout /var/log/nginx/access.log
+RUN ln -sf /dev/stderr /var/log/nginx/error.log

 # Copy Synapse worker, nginx and supervisord configuration template files
 COPY ./docker/conf-workers/* /conf/

@@ -80,4 +70,4 @@ FROM $FROM
 # Replace the healthcheck with one which checks *all* the workers. The script
 # is generated by configure_workers_and_start.py.
 HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
-    CMD ["/healthcheck.sh"]
+    CMD /bin/sh /healthcheck.sh
|
|||||||
ARG SYNAPSE_VERSION=latest
|
ARG SYNAPSE_VERSION=latest
|
||||||
# This is an intermediate image, to be built locally (not pulled from a registry).
|
# This is an intermediate image, to be built locally (not pulled from a registry).
|
||||||
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
||||||
ARG DEBIAN_VERSION=trixie
|
ARG DEBIAN_VERSION=bookworm
|
||||||
|
|
||||||
FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
|
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
|
||||||
|
|
||||||
FROM $FROM
|
FROM $FROM
|
||||||
# First of all, we copy postgres server from the official postgres image,
|
# First of all, we copy postgres server from the official postgres image,
|
||||||
# since for repeated rebuilds, this is much faster than apt installing
|
# since for repeated rebuilds, this is much faster than apt installing
|
||||||
# postgres each time.
|
# postgres each time.
|
||||||
|
|
||||||
# This trick only works because we use a postgres image based on the same
|
# This trick only works because (a) the Synapse image happens to have all the
|
||||||
# debian version as Synapse's docker image (so the versions of the shared
|
# shared libraries that postgres wants, (b) we use a postgres image based on
|
||||||
# libraries match). Any missing libraries need to be added to either the
|
# the same debian version as Synapse's docker image (so the versions of the
|
||||||
# Synapse image or docker/Dockerfile-workers.
|
# shared libraries match).
|
||||||
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
||||||
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
|
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
|
||||||
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
|
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
|
||||||
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
|
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
|
||||||
ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
|
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
||||||
ENV PGDATA=/var/lib/postgresql/data
|
ENV PGDATA=/var/lib/postgresql/data
|
||||||
|
|
||||||
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
|
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
|
||||||
@@ -58,4 +58,4 @@ ENTRYPOINT ["/start_for_complement.sh"]
|
|||||||
|
|
||||||
# Update the healthcheck to have a shorter check interval
|
# Update the healthcheck to have a shorter check interval
|
||||||
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
||||||
CMD ["/healthcheck.sh"]
|
CMD /bin/sh /healthcheck.sh
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ echo " Args: $*"
 echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"

 function log {
-    d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
+    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
     echo "$d $*"
 }

@@ -54,6 +54,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
     export SYNAPSE_WORKER_TYPES="\
         event_persister:2, \
         background_worker, \
+        frontend_proxy, \
         event_creator, \
         user_dir, \
         media_repository, \
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
         client_reader, \
         appservice, \
         pusher, \
-        device_lists:2, \
         stream_writers=account_data+presence+receipts+to_device+typing"

 fi
@@ -103,11 +103,12 @@ fi
 # Note that both the key and certificate are in PEM format (not DER).

 # First generate a configuration file to set up a Subject Alternative Name.
-echo "\
+cat > /conf/server.tls.conf <<EOF
 .include /etc/ssl/openssl.cnf

 [SAN]
-subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
+subjectAltName=DNS:${SERVER_NAME}
+EOF

 # Generate an RSA key
 openssl genrsa -out /conf/server.tls.key 2048
@@ -122,8 +123,8 @@ openssl x509 -req -in /conf/server.tls.csr \
     -out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN

 # Assert that we have a Subject Alternative Name in the certificate.
-# (the test will exit with 1 here if there isn't a SAN in the certificate.)
-[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
+# (grep will exit with 1 here if there isn't a SAN in the certificate.)
+openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:

 export SYNAPSE_TLS_CERT=/conf/server.tls.crt
 export SYNAPSE_TLS_KEY=/conf/server.tls.key
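Both sides of the SAN assertion above reduce to the same substring test over the textual dump from `openssl x509 -noout -text`. A minimal Python sketch of that check, assuming the certificate path this script writes to:

```python
# A sketch of the SAN assertion done from Python instead of the shell above.
import subprocess

def cert_has_dns_san(cert_path: str = "/conf/server.tls.crt") -> bool:
    # Render the certificate as text, exactly like `openssl x509 -noout -text`.
    proc = subprocess.run(
        ["openssl", "x509", "-in", cert_path, "-noout", "-text"],
        capture_output=True,
        text=True,
        check=True,
    )
    # A SAN entry appears as "DNS:<name>" in the textual dump; both diff
    # variants test for exactly this substring.
    return "DNS:" in proc.stdout

if __name__ == "__main__":
    assert cert_has_dns_san(), "no Subject Alternative Name in certificate"
```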
@@ -98,10 +98,6 @@ rc_delayed_event_mgmt:
   per_second: 9999
   burst_count: 9999

-rc_room_creation:
-  per_second: 9999
-  burst_count: 9999
-
 federation_rr_transactions_per_room_per_second: 9999

 allow_device_name_lookup_over_federation: true
@@ -131,10 +127,6 @@ experimental_features:
   msc3983_appservice_otk_claims: true
   # Proxy key queries to exclusive ASes
   msc3984_appservice_key_query: true
-  # Invite filtering
-  msc4155_enabled: true
-  # Thread Subscriptions
-  msc4306_enabled: true

 server_notices:
   system_mxid_localpart: _server
@@ -77,13 +77,6 @@ loggers:
     #}
     synapse.visibility.filtered_event_debug:
        level: DEBUG

-    {#
-      If Synapse is under test, we don't care about seeing the "Applying schema" log
-      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
-    #}
-    synapse.storage.prepare_database:
-        level: WARN
     {% endif %}

 root:
@@ -1,4 +1,4 @@
-#!/usr/local/bin/python
+#!/usr/bin/env python
 #
 # This file is licensed under the Affero General Public License (AGPL) version 3.
 #
@@ -65,9 +65,13 @@ from itertools import chain
 from pathlib import Path
 from typing import (
     Any,
+    Dict,
+    List,
     Mapping,
     MutableMapping,
     NoReturn,
+    Optional,
+    Set,
     SupportsIndex,
 )

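The branch re-imports `Dict`, `List`, `Optional` and `Set` from `typing` because the annotation syntax used on develop needs newer Pythons: builtin generics like `dict[str, ...]` (PEP 585) require 3.9+, and `X | None` unions (PEP 604) require 3.10+ when evaluated at runtime. A small sketch of the equivalence:

```python
# Equivalent annotations, old and new style. The `typing` aliases work on
# older interpreters; the builtin-generic forms are what develop uses.
from typing import Dict, List, Optional

def old_style(workers: Dict[str, List[int]], name: Optional[str]) -> None:
    pass

def new_style(workers: dict[str, list[int]], name: str | None) -> None:  # 3.10+
    pass
```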
@@ -92,7 +96,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
 # Watching /_matrix/media and related needs a "media" listener
 # Stream Writers require "client" and "replication" listeners because they
 # have to attach by instance_map to the master process and have client endpoints.
-WORKERS_CONFIG: dict[str, dict[str, Any]] = {
+WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
@@ -174,9 +178,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
-            "^/_matrix/client/(r0|v3)/delete_devices$",
            "^/_matrix/client/versions$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
            "^/_matrix/client/(r0|v3|unstable)/register$",
@@ -193,10 +194,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
            "^/_matrix/client/(r0|v3|unstable)/capabilities$",
            "^/_matrix/client/(r0|v3|unstable)/notifications$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
-            "^/_matrix/client/unstable/org.matrix.msc4140/delayed_events(/.*/restart)?$",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -205,7 +202,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["federation"],
        "endpoint_patterns": [
-            "^/_matrix/federation/v1/version$",
            "^/_matrix/federation/(v1|v2)/event/",
            "^/_matrix/federation/(v1|v2)/state/",
            "^/_matrix/federation/(v1|v2)/state_ids/",
@@ -268,6 +264,13 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
     },
+    "frontend_proxy": {
+        "app": "synapse.app.generic_worker",
+        "listener_resources": ["client", "replication"],
+        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
+        "shared_extra_conf": {},
+        "worker_extra_conf": "",
+    },
     "account_data": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
@@ -302,13 +305,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
     },
-    "device_lists": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
     "typing": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
@@ -325,15 +321,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
     },
-    "thread_subscriptions": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [
-            "^/_matrix/client/unstable/io.element.msc4306/.*",
-        ],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
 }

 # Templates for sections that may be inserted multiple times in config files
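Each `WORKERS_CONFIG` entry is a template for one worker type; its `endpoint_patterns` are what eventually become nginx `location` blocks pointing at the worker's upstream. An illustrative reduction of that step, using simplified stand-in names rather than the script's exact code:

```python
# Illustrative only: turning a WORKERS_CONFIG-style entry into the
# "endpoint pattern -> upstream" mapping later rendered into nginx config.
from typing import Any, Dict

frontend_proxy: Dict[str, Any] = {
    "app": "synapse.app.generic_worker",
    "listener_resources": ["client", "replication"],
    "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
}

def locations_for(worker_type: str, config: Dict[str, Any]) -> Dict[str, str]:
    # A dict deduplicates patterns shared by several instances of the same
    # worker type: they all proxy to the same upstream block.
    return {pattern: f"http://{worker_type}" for pattern in config["endpoint_patterns"]}

print(locations_for("frontend_proxy", frontend_proxy))
# {'^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload': 'http://frontend_proxy'}
```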
@@ -364,11 +351,6 @@ def error(txt: str) -> NoReturn:


 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()

@@ -394,18 +376,16 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
     #
     # We use append mode in case the files have already been written to by something else
     # (for instance, as part of the instructions in a dockerfile).
-    exists = os.path.isfile(dst)
     with open(dst, "a") as outfile:
         # In case the existing file doesn't end with a newline
-        if exists:
-            outfile.write("\n")
+        outfile.write("\n")

         outfile.write(rendered)


 def add_worker_roles_to_shared_config(
     shared_config: dict,
-    worker_types_set: set[str],
+    worker_types_set: Set[str],
     worker_name: str,
     worker_port: int,
 ) -> None:
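The `exists` guard develop adds to `convert` only prepends the separating newline when the destination file is already there, so a freshly created file does not start with a blank line; the branch writes the newline unconditionally. A standalone sketch of the develop behaviour:

```python
# Standalone sketch of the append-mode guard from `convert` above.
import os

def append_rendered(dst: str, rendered: str) -> None:
    exists = os.path.isfile(dst)
    with open(dst, "a") as outfile:
        # In case the existing file doesn't end with a newline
        if exists:
            outfile.write("\n")
        outfile.write(rendered)
```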
@@ -424,18 +404,16 @@ def add_worker_roles_to_shared_config(
     # streams
     instance_map = shared_config.setdefault("instance_map", {})

-    # This is a list of the stream_writers.
-    stream_writers = {
+    # This is a list of the stream_writers that there can be only one of. Events can be
+    # sharded, and therefore doesn't belong here.
+    singular_stream_writers = [
         "account_data",
-        "events",
-        "device_lists",
         "presence",
         "receipts",
         "to_device",
         "typing",
         "push_rules",
-        "thread_subscriptions",
-    }
+    ]

     # Worker-type specific sharding config. Now a single worker can fulfill multiple
     # roles, check each.
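This list drives the shared-config update in the hunk that follows: any worker type whose name matches one of these streams is appended under `stream_writers` in the shared config. A runnable sketch of that `setdefault` chain (the function name here is illustrative):

```python
# Sketch of how singular stream writers land in the shared config; the
# setdefault chain creates the nested structure on first use.
from typing import Any, Dict, Set

def register_stream_writers(
    shared_config: Dict[str, Any], worker_types_set: Set[str], worker_name: str
) -> None:
    singular_stream_writers = [
        "account_data", "presence", "receipts", "to_device", "typing", "push_rules",
    ]
    for worker in worker_types_set:
        if worker in singular_stream_writers:
            shared_config.setdefault("stream_writers", {}).setdefault(
                worker, []
            ).append(worker_name)

config: Dict[str, Any] = {}
register_stream_writers(config, {"typing", "to_device"}, "worker1")
print(config)
# {'stream_writers': {'typing': ['worker1'], 'to_device': ['worker1']}}
# (key order may vary, since sets iterate in arbitrary order)
```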
@@ -445,11 +423,28 @@ def add_worker_roles_to_shared_config(
     if "federation_sender" in worker_types_set:
         shared_config.setdefault("federation_sender_instances", []).append(worker_name)

+    if "event_persister" in worker_types_set:
+        # Event persisters write to the events stream, so we need to update
+        # the list of event stream writers
+        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
+            worker_name
+        )
+
+        # Map of stream writer instance names to host/ports combos
+        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
+            instance_map[worker_name] = {
+                "path": f"/run/worker.{worker_port}",
+            }
+        else:
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
     # Update the list of stream writers. It's convenient that the name of the worker
     # type is the same as the stream to write. Iterate over the whole list in case there
     # is more than one.
     for worker in worker_types_set:
-        if worker in stream_writers:
+        if worker in singular_stream_writers:
             shared_config.setdefault("stream_writers", {}).setdefault(
                 worker, []
             ).append(worker_name)
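The added block registers event persisters as writers to the `events` stream and records how other processes reach each one. A hedged sketch of just the `instance_map` entry shape, following the diff's unix-socket path convention:

```python
# Sketch of the instance_map entry shape used above: replication attaches over
# a unix socket when SYNAPSE_USE_UNIX_SOCKET is set, otherwise localhost TCP.
import os
from typing import Any, Dict

def instance_map_entry(worker_port: int) -> Dict[str, Any]:
    if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
        return {"path": f"/run/worker.{worker_port}"}
    return {"host": "localhost", "port": worker_port}

print(instance_map_entry(18009))
# {'host': 'localhost', 'port': 18009}  (unless the env var is set)
```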
@@ -468,9 +463,9 @@ def add_worker_roles_to_shared_config(


 def merge_worker_template_configs(
-    existing_dict: dict[str, Any] | None,
-    to_be_merged_dict: dict[str, Any],
-) -> dict[str, Any]:
+    existing_dict: Optional[Dict[str, Any]],
+    to_be_merged_dict: Dict[str, Any],
+) -> Dict[str, Any]:
     """When given an existing dict of worker template configuration consisting with both
     dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
     return new dict.
@@ -481,7 +476,7 @@ def merge_worker_template_configs(
     existing_dict.
     Returns: The newly merged together dict values.
     """
-    new_dict: dict[str, Any] = {}
+    new_dict: Dict[str, Any] = {}
     if not existing_dict:
         # It doesn't exist yet, just use the new dict(but take a copy not a reference)
         new_dict = to_be_merged_dict.copy()
@@ -506,8 +501,8 @@ def merge_worker_template_configs(


 def insert_worker_name_for_worker_config(
-    existing_dict: dict[str, Any], worker_name: str
-) -> dict[str, Any]:
+    existing_dict: Dict[str, Any], worker_name: str
+) -> Dict[str, Any]:
     """Insert a given worker name into the worker's configuration dict.

     Args:
@@ -523,7 +518,7 @@ def insert_worker_name_for_worker_config(
     return dict_to_edit


-def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
+def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
     """
     Apply multiplier(if found) by returning a new expanded list with some basic error
     checking.
@@ -584,7 +579,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

 def split_and_strip_string(
     given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> list[str]:
+) -> List[str]:
     """
     Helper to split a string on split_char and strip whitespace from each end of each
     element.
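`apply_requested_multiplier_for_worker` handles the `worker:N` shorthand seen in `SYNAPSE_WORKER_TYPES` earlier in this compare (e.g. `event_persister:2`). A simplified sketch of the expansion, with the script's error checking omitted:

```python
# Simplified sketch of the "worker:N" multiplier expansion.
from typing import List

def expand_multipliers(worker_types: List[str]) -> List[str]:
    expanded: List[str] = []
    for worker_type in worker_types:
        if ":" in worker_type:
            name, _, count = worker_type.partition(":")
            expanded.extend([name] * int(count))  # error checking omitted here
        else:
            expanded.append(worker_type)
    return expanded

print(expand_multipliers(["event_persister:2", "background_worker"]))
# ['event_persister', 'event_persister', 'background_worker']
```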
@@ -609,12 +604,12 @@ def generate_base_homeserver_config() -> None:
     # start.py already does this for us, so just call that.
     # note that this script is copied in in the official, monolith dockerfile
     os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
-    subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
+    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)


 def parse_worker_types(
-    requested_worker_types: list[str],
-) -> dict[str, set[str]]:
+    requested_worker_types: List[str],
+) -> Dict[str, Set[str]]:
     """Read the desired list of requested workers and prepare the data for use in
     generating worker config files while also checking for potential gotchas.

@@ -630,14 +625,14 @@ def parse_worker_types(
     # A counter of worker_base_name -> int. Used for determining the name for a given
     # worker when generating its config file, as each worker's name is just
     # worker_base_name followed by instance number
-    worker_base_name_counter: dict[str, int] = defaultdict(int)
+    worker_base_name_counter: Dict[str, int] = defaultdict(int)

     # Similar to above, but more finely grained. This is used to determine we don't have
     # more than a single worker for cases where multiples would be bad(e.g. presence).
-    worker_type_shard_counter: dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: Dict[str, int] = defaultdict(int)

     # The final result of all this processing
-    dict_to_return: dict[str, set[str]] = {}
+    dict_to_return: Dict[str, Set[str]] = {}

     # Handle any multipliers requested for given workers.
     multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -681,7 +676,7 @@ def parse_worker_types(

     # Split the worker_type_string on "+", remove whitespace from ends then make
     # the list a set so it's deduplicated.
-    worker_types_set: set[str] = set(
+    worker_types_set: Set[str] = set(
         split_and_strip_string(worker_type_string, "+")
     )

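develop's `sys.executable` re-runs `/start.py` under whichever interpreter is executing this script, which keeps working if the image's Python moves or a virtualenv is involved; the hardcoded `/usr/local/bin/python` bakes in the official image's layout. A minimal illustration (paths are those used inside the image):

```python
# Runs /start.py under the current interpreter, whatever its path is.
import subprocess
import sys

subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
```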
@@ -740,7 +735,7 @@ def generate_worker_files(
     environ: Mapping[str, str],
     config_path: str,
     data_dir: str,
-    requested_worker_types: dict[str, set[str]],
+    requested_worker_types: Dict[str, Set[str]],
 ) -> None:
     """Read the desired workers(if any) that is passed in and generate shared
     homeserver, nginx and supervisord configs.
@@ -761,7 +756,7 @@ def generate_worker_files(
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners: list[Any]
+    listeners: List[Any]
     if using_unix_sockets:
         listeners = [
             {
@@ -789,12 +784,12 @@ def generate_worker_files(
     # base shared worker jinja2 template. This config file will be passed to all
     # workers, included Synapse's main process. It is intended mainly for disabling
     # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: dict[str, Any] = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}

     # List of dicts that describe workers.
     # We pass this to the Supervisor template later to generate the appropriate
     # program blocks.
-    worker_descriptors: list[dict[str, Any]] = []
+    worker_descriptors: List[Dict[str, Any]] = []

     # Upstreams for load-balancing purposes. This dict takes the form of the worker
     # type to the ports of each worker. For example:
@@ -802,14 +797,14 @@ def generate_worker_files(
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: dict[str, set[int]] = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}

     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
     # will be placed after the proxy_pass directive. The main benefit to representing
     # this data as a dict over a str is that we can easily deduplicate endpoints
     # across multiple instances of the same worker. The final rendering will be combined
     # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: dict[str, str] = {}
+    nginx_locations: Dict[str, str] = {}

     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
@@ -843,7 +838,7 @@ def generate_worker_files(
     # yaml config file
     for worker_name, worker_types_set in requested_worker_types.items():
         # The collected and processed data will live here.
-        worker_config: dict[str, Any] = {}
+        worker_config: Dict[str, Any] = {}

         # Merge all worker config templates for this worker into a single config
         for worker_type in worker_types_set:
@@ -873,13 +868,6 @@ def generate_worker_files(
         else:
             healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

-        # Special case for event_persister: those are just workers that write to
-        # the `events` stream. For other workers, the worker name is the same
-        # name of the stream they write to, but for some reason it is not the
-        # case for event_persister.
-        if "event_persister" in worker_types_set:
-            worker_types_set.add("events")
-
         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(
             shared_config, worker_types_set, worker_name, worker_port
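Note the interplay with the branch's hunk at `-445` earlier: develop aliases `event_persister` onto the `events` stream before calling `add_worker_roles_to_shared_config`, while the branch special-cases it inside that function instead. A compressed view of the develop approach:

```python
# Develop's approach: make the worker-type name line up with the stream name
# before the generic stream_writers update runs.
worker_types_set = {"event_persister", "typing"}
if "event_persister" in worker_types_set:
    # event_persister workers write to the `events` stream, whose name does
    # not match the worker type (unlike e.g. `typing`).
    worker_types_set.add("events")
```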
@@ -1010,7 +998,6 @@ def generate_worker_files(
         "/healthcheck.sh",
         healthcheck_urls=healthcheck_urls,
     )
-    os.chmod("/healthcheck.sh", 0o755)

     # Ensure the logging directory exists
     log_dir = data_dir + "/logs"
@@ -1026,7 +1013,7 @@ def generate_worker_log_config(
     Returns: the path to the generated file
     """
     # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: dict[str, str | None] = {}
+    extra_log_template_args: Dict[str, Optional[str]] = {}
     if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
         extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1050,7 +1037,7 @@ def generate_worker_log_config(
     return log_config_filepath


-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     parser = ArgumentParser()
     parser.add_argument(
         "--generate-only",
@@ -1084,7 +1071,7 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     if not worker_types_env:
         # No workers, just the main process
         worker_types = []
-        requested_worker_types: dict[str, Any] = {}
+        requested_worker_types: Dict[str, Any] = {}
     else:
         # Split type names by comma, ignoring whitespace.
         worker_types = split_and_strip_string(worker_types_env, ",")
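The dropped `os.chmod` explains the `HEALTHCHECK` difference in the two Dockerfiles earlier in this compare: develop marks the generated script executable, so Docker can exec it directly; without the chmod it has to be run through an interpreter:

```python
# Why the HEALTHCHECK lines differ between the two sides.
import os

os.chmod("/healthcheck.sh", 0o755)
# enables:     HEALTHCHECK ... CMD ["/healthcheck.sh"]
# without it:  HEALTHCHECK ... CMD /bin/sh /healthcheck.sh
```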
@@ -3,14 +3,14 @@
 #
 # Used by `complement.sh`. Not suitable for production use.

-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.9

 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian trixie here because this could change upstream
-# and other Dockerfiles used for testing are expecting trixie.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm

 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps
@@ -6,7 +6,7 @@ import os
 import platform
 import subprocess
 import sys
-from typing import Any, Mapping, MutableMapping, NoReturn
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional

 import jinja2

@@ -22,11 +22,6 @@ def error(txt: str) -> NoReturn:


 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()

@@ -50,7 +45,7 @@ def generate_config_from_template(
     config_dir: str,
     config_path: str,
     os_environ: Mapping[str, str],
-    ownership: str | None,
+    ownership: Optional[str],
 ) -> None:
     """Generate a homeserver.yaml from environment variables

@@ -69,7 +64,7 @@ def generate_config_from_template(
     )

     # populate some params from data files (if they exist, else create new ones)
-    environ: dict[str, Any] = dict(os_environ)
+    environ: Dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -147,7 +142,7 @@ def generate_config_from_template(
     subprocess.run(args, check=True)


-def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
     """Run synapse with a --generate-config param to generate a template config file

     Args:
@@ -200,7 +195,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> No
     subprocess.run(args, check=True)


-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"

     # if we were given an explicit user to switch to, do so
@@ -63,18 +63,6 @@ mdbook serve

 The URL at which the docs can be viewed at will be logged.

-## Synapse configuration documentation
-
-The [Configuration
-Manual](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html)
-page is generated from a YAML file,
-[schema/synapse-config.schema.yaml](../schema/synapse-config.schema.yaml). To
-add new options or modify existing ones, first edit that file, then run
-[scripts-dev/gen_config_documentation.py](../scripts-dev/gen_config_documentation.py)
-to generate an updated Configuration Manual markdown file.
-
-Build the book as described above to preview it in a web browser.
-
 ## Configuration and theming

 The look and behaviour of the website is configured by the [book.toml](../book.toml) file
@@ -5,7 +5,6 @@

 # Setup
 - [Installation](setup/installation.md)
-- [Security](setup/security.md)
 - [Using Postgres](postgres.md)
 - [Configuring a Reverse Proxy](reverse_proxy.md)
 - [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md)
@@ -50,8 +49,6 @@
   - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
   - [Account data callbacks](modules/account_data_callbacks.md)
   - [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
-  - [Media repository callbacks](modules/media_repository_callbacks.md)
-  - [Ratelimit callbacks](modules/ratelimit_callbacks.md)
   - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
 - [Workers](workers.md)
   - [Using `synctl` with Workers](synctl_workers.md)
@@ -61,7 +58,6 @@
 - [Admin API](usage/administration/admin_api/README.md)
   - [Account Validity](admin_api/account_validity.md)
   - [Background Updates](usage/administration/admin_api/background_updates.md)
-  - [Fetch Event](admin_api/fetch_event.md)
   - [Event Reports](admin_api/event_reports.md)
   - [Experimental Features](admin_api/experimental_features.md)
   - [Media](admin_api/media_admin_api.md)
@@ -70,13 +66,11 @@
   - [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
   - [Manipulate Room Membership](admin_api/room_membership.md)
   - [Rooms](admin_api/rooms.md)
-  - [Scheduled tasks](admin_api/scheduled_tasks.md)
   - [Server Notices](admin_api/server_notices.md)
   - [Statistics](admin_api/statistics.md)
   - [Users](admin_api/user_admin_api.md)
   - [Server Version](admin_api/version_api.md)
   - [Federation](usage/administration/admin_api/federation.md)
-  - [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
 - [Manhole](manhole.md)
 - [Monitoring](metrics-howto.md)
   - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
@@ -117,8 +111,6 @@
 - [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
 - [Media Repository](media_repository.md)
 - [Room and User Statistics](room_and_user_statistics.md)
-- [Releasing]()
-  - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
- [Scripts]()

 # Other
@@ -1,67 +0,0 @@
-# Client-Server API Extensions
-
-Server administrators can set special account data to change how the Client-Server API behaves for
-their clients. Setting the account data, or having it already set, as a non-admin has no effect.
-
-All configuration options can be set through the `io.element.synapse.admin_client_config` global
-account data on the admin's user account.
-
-Example:
-```
-PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
-{
-  "return_soft_failed_events": true
-}
-```
-
-## See soft failed events
-
-Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
-
-To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
-to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
-exclude soft failed events.
-
-**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
-in the event's `unsigned` content like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `false`
-
-## See events marked spammy by policy servers
-
-Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
-
-Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
-flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
-to `true`.
-
-`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
-policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
-is always `true`.
-
-Events which were flagged by the policy will be flagged as `io.element.synapse.policy_server_spammy` in the
-event's `unsigned` content, like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
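A hedged sketch of setting the admin client config described in the page removed above; the homeserver URL, user ID and access token are placeholders:

```python
# Sets the admin client config account data via the documented endpoint.
import requests

HOMESERVER = "https://homeserver.example"  # placeholder
ADMIN_USER = "@admin:homeserver.example"   # placeholder
TOKEN = "ACCESS_TOKEN"                     # placeholder

resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{ADMIN_USER}/account_data/"
    "io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```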
@@ -117,6 +117,7 @@ It returns a JSON body like the following:
     "hashes": {
         "sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
     },
+    "origin": "matrix.org",
     "origin_server_ts": 1592291711430,
     "prev_events": [
         "$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
@@ -1,53 +0,0 @@
-# Fetch Event API
-
-The fetch event API allows admins to fetch an event regardless of their membership in the room it
-originated in.
-
-To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api/).
-
-Request:
-```http
-GET /_synapse/admin/v1/fetch_event/<event_id>
-```
-
-The API returns a JSON body like the following:
-
-Response:
-```json
-{
-  "event": {
-    "auth_events": [
-      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
-      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
-      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
-    ],
-    "content": {
-      "body": "Hey now",
-      "msgtype": "m.text"
-    },
-    "depth": 6,
-    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
-    "hashes": {
-      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
-    },
-    "origin_server_ts": 799,
-    "prev_events": [
-      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
-    ],
-    "room_id": "!aIhKToCqgPTBloWMpf:test",
-    "sender": "@user:test",
-    "signatures": {
-      "test": {
-        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
-      }
-    },
-    "type": "m.room.message",
-    "unsigned": {
-      "age_ts": 799
-    }
-  }
-}
-```
-
-
|||||||
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
|
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
|
||||||
Admin API.
|
Admin API.
|
||||||
|
|
||||||
## Query a piece of media by ID
|
|
||||||
|
|
||||||
This API returns information about a piece of local or cached remote media given the origin server name and media id. If
|
|
||||||
information is requested for remote media which is not cached the endpoint will return 404.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
```http
|
|
||||||
GET /_synapse/admin/v1/media/<origin>/<media_id>
|
|
||||||
```
|
|
||||||
|
|
||||||
The API returns a JSON body with media info like the following:
|
|
||||||
|
|
||||||
Response:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"media_info": {
|
|
||||||
"media_origin": "remote.com",
|
|
||||||
"user_id": null,
|
|
||||||
"media_id": "sdginwegWEG",
|
|
||||||
"media_type": "img/png",
|
|
||||||
"media_length": 67,
|
|
||||||
"upload_name": "test.png",
|
|
||||||
"created_ts": 300,
|
|
||||||
"filesystem_id": "wgeweg",
|
|
||||||
"url_cache": null,
|
|
||||||
"last_access_ts": 400,
|
|
||||||
"quarantined_by": null,
|
|
||||||
"authenticated": false,
|
|
||||||
"safe_from_quarantine": null,
|
|
||||||
"sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Listing all quarantined media
|
|
||||||
|
|
||||||
This API returns a list of all quarantined media on the server. It is paginated, and can be scoped to either local or
|
|
||||||
remote media. Note that the pagination values are also scoped to the request parameters - changing them but keeping the
|
|
||||||
same pagination values will result in unexpected results.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
```http
|
|
||||||
GET /_synapse/admin/v1/media/quarantined?from=0&limit=100&kind=local
|
|
||||||
```
|
|
||||||
|
|
||||||
`from` and `limit` are optional parameters, and default to `0` and `100` respectively. They are the row index and number
|
|
||||||
of rows to return - they are not timestamps.
|
|
||||||
|
|
||||||
`kind` *MUST* either be `local` or `remote`.
|
|
||||||
|
|
||||||
The API returns a JSON body containing MXC URIs for the quarantined media, like the following:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"media": [
|
|
||||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
|
||||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
# Quarantine media
|
# Quarantine media
|
||||||
|
|
||||||
Quarantining media means that it is marked as inaccessible by users. It applies
|
Quarantining media means that it is marked as inaccessible by users. It applies
|
||||||
|
|||||||
@@ -794,7 +794,6 @@ A response body like the following is returned:
     "results": [
         {
             "delete_id": "delete_id1",
-            "room_id": "!roomid:example.com",
             "status": "failed",
             "error": "error message",
             "shutdown_room": {
@@ -805,8 +804,7 @@ A response body like the following is returned:
             }
         }, {
             "delete_id": "delete_id2",
-            "room_id": "!roomid:example.com",
-            "status": "active",
+            "status": "purging",
             "shutdown_room": {
                 "kicked_users": [
                     "@foobar:example.com"
@@ -843,9 +841,7 @@ A response body like the following is returned:

 ```json
 {
-    "status": "active",
-    "delete_id": "bHkCNQpHqOaFhPtK",
-    "room_id": "!roomid:example.com",
+    "status": "purging",
     "shutdown_room": {
         "kicked_users": [
             "@foobar:example.com"
@@ -873,11 +869,10 @@ The following fields are returned in the JSON response body:
 - `results` - An array of objects, each containing information about one task.
   This field is omitted from the result when you query by `delete_id`.
   Task objects contain the following fields:
-  - `delete_id` - The ID for this purge
-  - `room_id` - The ID of the room being deleted
+  - `delete_id` - The ID for this purge if you query by `room_id`.
   - `status` - The status will be one of:
-    - `scheduled` - The deletion is waiting to be started
-    - `active` - The process is purging the room and event data from database.
+    - `shutting_down` - The process is removing users from the room.
+    - `purging` - The process is purging the room and event data from database.
     - `complete` - The process has completed successfully.
     - `failed` - The process is aborted, an error has occurred.
 - `error` - A string that shows an error message if `status` is `failed`.
@@ -1115,76 +1110,3 @@ Example response:
   ]
 }
 ```
-
-# Admin Space Hierarchy Endpoint
-
-This API allows an admin to fetch the space/room hierarchy for a given space,
-returning details about that room and any children the room may have, paginating
-over the space tree in a depth-first manner to locate child rooms. This is
-functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
-room membership when returning room summaries.
-
-The endpoint does not query other servers over federation about remote rooms
-that the server has not joined. This is a deliberate trade-off: while this
-means it will leave some holes in the hierarchy that we could otherwise
-sometimes fill in, it significantly improves the endpoint's response time and
-the admin endpoint is designed for managing rooms local to the homeserver
-anyway.
-
-**Parameters**
-
-The following query parameters are available:
-
-* `from` - An optional pagination token, provided when there are more rooms to
-  return than the limit.
-* `limit` - Maximum amount of rooms to return. Must be a non-negative integer,
-  defaults to `50`.
-* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
-  integer. 0 would correspond to just the root room, 1 would include just the
-  root room's children, etc. If not provided will recurse into the space tree without limit.
-
-Request:
-
-```http
-GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
-```
-
-Response:
-
-```json
-{
-  "rooms":
-    [
-      { "children_state": [
-          {
-            "content": {
-              "via": ["local_test_server"]
-            },
-            "origin_server_ts": 1500,
-            "sender": "@user:test",
-            "state_key": "!QrMkkqBSwYRIFNFCso:test",
-            "type": "m.space.child"
-          }
-        ],
-        "name": "space room",
-        "guest_can_join": false,
-        "join_rule": "public",
-        "num_joined_members": 1,
-        "room_id": "!sPOpNyMHbZAoAOsOFL:test",
-        "room_type": "m.space",
-        "world_readable": false
-      },
-
-      {
-        "children_state": [],
-        "guest_can_join": true,
-        "join_rule": "invite",
-        "name": "nefarious",
-        "num_joined_members": 1,
-        "room_id": "!QrMkkqBSwYRIFNFCso:test",
-        "topic": "being bad",
-        "world_readable": false}
-    ],
-  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
-}
-```
@@ -1,54 +0,0 @@
|
|||||||
# Show scheduled tasks
|
|
||||||
|
|
||||||
This API returns information about scheduled tasks.
|
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an `access_token`
|
|
||||||
for a server admin: see [Admin API](../usage/administration/admin_api/).
|
|
||||||
|
|
||||||
The api is:
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/scheduled_tasks
|
|
||||||
```
|
|
||||||
|
|
||||||
It returns a JSON body like the following:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"scheduled_tasks": [
|
|
||||||
{
|
|
||||||
"id": "GSA124oegf1",
|
|
||||||
"action": "shutdown_room",
|
|
||||||
"status": "complete",
|
|
||||||
"timestamp_ms": 23423523,
|
|
||||||
"resource_id": "!roomid",
|
|
||||||
"result": "some result",
|
|
||||||
"error": null
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Query parameters:**
|
|
||||||
|
|
||||||
* `action_name`: string - Is optional. Returns only the scheduled tasks with the given action name.
|
|
||||||
* `resource_id`: string - Is optional. Returns only the scheduled tasks with the given resource id.
|
|
||||||
* `status`: string - Is optional. Returns only the scheduled tasks matching the given status, one of
|
|
||||||
- "scheduled" - Task is scheduled but not active
|
|
||||||
- "active" - Task is active and probably running, and if not will be run on next scheduler loop run
|
|
||||||
- "complete" - Task has completed successfully
|
|
||||||
- "failed" - Task is over and either returned a failed status, or had an exception
|
|
||||||
|
|
||||||
* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp inferior to the specified one.
|
|
||||||
|
|
||||||
**Response**
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body along with a `200` HTTP status code:
|
|
||||||
|
|
||||||
* `id`: string - ID of scheduled task.
|
|
||||||
* `action`: string - The name of the scheduled task's action.
|
|
||||||
* `status`: string - The status of the scheduled task.
|
|
||||||
* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task. If the status is "scheduled", this represents when it should be launched.
|
|
||||||
Otherwise it represents the last time this task changed state.
|
|
||||||
* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one.
|
|
||||||
* `result`: Optional JSON - Any result of the scheduled task, if given.
|
|
||||||
* `error`: Optional string - If the task has the status "failed", the error associated with this failure.
|
|
||||||
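As a rough illustration, the query parameters above can be combined; a minimal Python sketch (URL and token are placeholders) listing failed `shutdown_room` tasks:

```python
import requests

BASE_URL = "https://homeserver.example.com"  # placeholder
ACCESS_TOKEN = "<admin_access_token>"        # placeholder

resp = requests.get(
    f"{BASE_URL}/_synapse/admin/v1/scheduled_tasks",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    params={"action_name": "shutdown_room", "status": "failed"},
)
resp.raise_for_status()
for task in resp.json()["scheduled_tasks"]:
    print(task["id"], task["timestamp_ms"], task.get("error"))
```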
@@ -163,8 +163,7 @@ Body parameters:
|
|||||||
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
|
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
|
||||||
- `user_type` - **string** or null, optional. If not provided, the user type will
|
- `user_type` - **string** or null, optional. If not provided, the user type will
|
||||||
not be changed. If `null` is given, the user type will be cleared.
|
not be changed. If `null` is given, the user type will be cleared.
|
||||||
Other allowed options are: `bot` and `support` and any extra values defined in the homeserver
|
Other allowed options are: `bot` and `support`.
|
||||||
[configuration](../usage/configuration/config_documentation.md#user_types).
|
|
||||||
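As a rough illustration of the `null`-versus-omitted distinction above, a minimal Python sketch clearing a user's type (this assumes the create-or-modify account endpoint, `PUT /_synapse/admin/v2/users/<user_id>`; the URL, user ID and token are placeholders):

```python
import requests

BASE_URL = "https://homeserver.example.com"  # placeholder
USER_ID = "@alice:example.com"               # placeholder
ACCESS_TOKEN = "<admin_access_token>"        # placeholder

resp = requests.put(
    f"{BASE_URL}/_synapse/admin/v2/users/{USER_ID}",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    # An explicit null clears the user type; omitting the field entirely
    # would leave it unchanged.
    json={"user_type": None},
)
resp.raise_for_status()
```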
|
|
||||||
## List Accounts
|
## List Accounts
|
||||||
### List Accounts (V2)
|
### List Accounts (V2)
|
||||||
@@ -505,55 +504,6 @@ with a body of:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## List room memberships of a user
|
|
||||||
|
|
||||||
Gets a list of room memberships for a specific `user_id`. This
|
|
||||||
endpoint differs from
|
|
||||||
[`GET /_synapse/admin/v1/users/<user_id>/joined_rooms`](#list-joined-rooms-of-a-user)
|
|
||||||
in that it returns rooms with memberships other than "join".
|
|
||||||
|
|
||||||
The API is:
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/users/<user_id>/memberships
|
|
||||||
```
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"memberships": {
|
|
||||||
"!DuGcnbhHGaSZQoNQR:matrix.org": "join",
|
|
||||||
"!ZtSaPCawyWtxfWiIy:matrix.org": "leave",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
which is a map of room IDs to membership states for the given user. This endpoint can
|
|
||||||
be used with both local and remote users, with the caveat that the homeserver will
|
|
||||||
only be aware of the memberships for rooms that one of its local users has joined.
|
|
||||||
|
|
||||||
Remote user memberships may also be out of date if all local users have since left
|
|
||||||
a room. The homeserver will thus no longer receive membership updates about it.
|
|
||||||
|
|
||||||
The list includes rooms that the user has since left; other membership states (knock,
|
|
||||||
invite, etc.) are also possible.
|
|
||||||
|
|
||||||
Note that rooms will only disappear from this list if they are
|
|
||||||
[purged](./rooms.md#delete-room-api) from the homeserver.
|
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
|
|
||||||
The following parameters should be set in the URL:
|
|
||||||
|
|
||||||
- `user_id` - fully qualified: for example, `@user:server.com`.
|
|
||||||
|
|
||||||
**Response**
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
- `memberships` - A map of `room_id` (string) to `membership` state (string).
|
|
||||||
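As a rough illustration, a minimal Python sketch (URL, user ID and token are placeholders) that fetches the membership map and picks out the rooms the user has left:

```python
import requests

BASE_URL = "https://homeserver.example.com"  # placeholder
USER_ID = "@user:server.com"                 # placeholder
ACCESS_TOKEN = "<admin_access_token>"        # placeholder

resp = requests.get(
    f"{BASE_URL}/_synapse/admin/v1/users/{USER_ID}/memberships",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
resp.raise_for_status()
memberships = resp.json()["memberships"]
# Each value is a membership state such as "join", "leave", "invite" or "knock".
left_rooms = [room for room, state in memberships.items() if state == "leave"]
print(left_rooms)
```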
|
|
||||||
## List joined rooms of a user
|
## List joined rooms of a user
|
||||||
|
|
||||||
Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
|
Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
|
||||||
@@ -1004,8 +954,7 @@ A response body like the following is returned:
|
|||||||
"last_seen_ip": "1.2.3.4",
|
"last_seen_ip": "1.2.3.4",
|
||||||
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
|
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
|
||||||
"last_seen_ts": 1474491775024,
|
"last_seen_ts": 1474491775024,
|
||||||
"user_id": "<user_id>",
|
"user_id": "<user_id>"
|
||||||
"dehydrated": false
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"device_id": "AUIECTSRND",
|
"device_id": "AUIECTSRND",
|
||||||
@@ -1013,8 +962,7 @@ A response body like the following is returned:
|
|||||||
"last_seen_ip": "1.2.3.5",
|
"last_seen_ip": "1.2.3.5",
|
||||||
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
|
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
|
||||||
"last_seen_ts": 1474491775025,
|
"last_seen_ts": 1474491775025,
|
||||||
"user_id": "<user_id>",
|
"user_id": "<user_id>"
|
||||||
"dehydrated": false
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"total": 2
|
"total": 2
|
||||||
@@ -1044,7 +992,6 @@ The following fields are returned in the JSON response body:
|
|||||||
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
|
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
|
||||||
device was last seen. (May be a few minutes out of date, for efficiency reasons).
|
device was last seen. (May be a few minutes out of date, for efficiency reasons).
|
||||||
- `user_id` - Owner of device.
|
- `user_id` - Owner of device.
|
||||||
- `dehydrated` - Whether the device is a dehydrated device.
|
|
||||||
|
|
||||||
- `total` - Total number of user's devices.
|
- `total` - Total number of user's devices.
|
||||||
|
|
||||||
@@ -1276,7 +1223,7 @@ See also the
|
|||||||
|
|
||||||
## Controlling whether a user is shadow-banned
|
## Controlling whether a user is shadow-banned
|
||||||
|
|
||||||
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
|
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
|
||||||
A shadow-banned user receives successful responses to their client-server API requests,
|
A shadow-banned user receives successful responses to their client-server API requests,
|
||||||
but the events are not propagated into rooms. This can be an effective tool as it
|
but the events are not propagated into rooms. This can be an effective tool as it
|
||||||
(hopefully) takes longer for the user to realise they are being moderated before
|
(hopefully) takes longer for the user to realise they are being moderated before
|
||||||
@@ -1513,11 +1460,8 @@ _Added in Synapse 1.72.0._
|
|||||||
|
|
||||||
## Redact all the events of a user
|
## Redact all the events of a user
|
||||||
|
|
||||||
This endpoint allows an admin to redact the events of a given user. There are no restrictions on
|
This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
|
||||||
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
|
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.
|
||||||
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
|
|
||||||
admin user is not admin/does not have the specified power level to issue redactions. An option
|
|
||||||
is provided to override the default and allow the admin to issue the redactions in all cases.
|
|
||||||
|
|
||||||
The API is
|
The API is
|
||||||
```
|
```
|
||||||
@@ -1527,7 +1471,7 @@ POST /_synapse/admin/v1/user/$user_id/redact
|
|||||||
"rooms": ["!roomid1", "!roomid2"]
|
"rooms": ["!roomid1", "!roomid2"]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted,
|
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted,
|
||||||
otherwise all the events in the rooms provided in the request will be redacted.
|
otherwise all the events in the rooms provided in the request will be redacted.
|
||||||
|
|
||||||
The API starts the redaction process running, and returns immediately with a JSON body with
|
The API starts the redaction process running, and returns immediately with a JSON body with
|
||||||
@@ -1553,10 +1497,7 @@ The following JSON body parameter must be provided:
|
|||||||
The following JSON body parameters are optional:
|
The following JSON body parameters are optional:
|
||||||
|
|
||||||
- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
|
- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
|
||||||
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
|
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
|
||||||
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
|
|
||||||
when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
|
|
||||||
where the admin does not have a sufficient power level to issue the redactions.
|
|
||||||
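As a rough illustration, a minimal Python sketch (URL, user ID and token are placeholders) combining the body parameters above:

```python
import requests

BASE_URL = "https://homeserver.example.com"  # placeholder
USER_ID = "@spammer:example.com"             # placeholder
ACCESS_TOKEN = "<admin_access_token>"        # placeholder

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/user/{USER_ID}/redact",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={
        "rooms": ["!roomid1", "!roomid2"],
        "reason": "spam",  # optional; included in each redaction event
        "limit": 500,      # optional; events searched per room
    },
)
resp.raise_for_status()
print(resp.json())  # the API returns immediately with a JSON body
```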
|
|
||||||
_Added in Synapse 1.116.0._
|
_Added in Synapse 1.116.0._
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
# Deprecation Policy
|
Deprecation Policy for Platform Dependencies
|
||||||
|
============================================
|
||||||
|
|
||||||
Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
|
Synapse has a number of platform dependencies, including Python, Rust,
|
||||||
and **application dependencies** (Python and Rust packages). This document outlines the
|
PostgreSQL and SQLite. This document outlines the policy towards which versions
|
||||||
policy towards which versions we support, and when we drop support for versions in the
|
we support, and when we drop support for versions in the future.
|
||||||
future.
|
|
||||||
|
|
||||||
## Platform Dependencies
|
|
||||||
|
Policy
|
||||||
|
------
|
||||||
|
|
||||||
Synapse follows the upstream support life cycles for Python and PostgreSQL,
|
Synapse follows the upstream support life cycles for Python and PostgreSQL,
|
||||||
i.e. when a version reaches End of Life, Synapse will withdraw support for that
|
i.e. when a version reaches End of Life, Synapse will withdraw support for that
|
||||||
@@ -21,11 +23,11 @@ people building from source should ensure they can fetch recent versions of Rust
|
|||||||
(e.g. by using [rustup](https://rustup.rs/)).
|
(e.g. by using [rustup](https://rustup.rs/)).
|
||||||
|
|
||||||
The oldest supported version of SQLite is the version
|
The oldest supported version of SQLite is the version
|
||||||
[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
|
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
|
||||||
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
|
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
|
||||||
|
|
||||||
|
Context
|
||||||
### Context
|
-------
|
||||||
|
|
||||||
It is important for system admins to have a clear understanding of the platform
|
It is important for system admins to have a clear understanding of the platform
|
||||||
requirements of Synapse and its deprecation policies so that they can
|
requirements of Synapse and its deprecation policies so that they can
|
||||||
@@ -48,42 +50,4 @@ the ecosystem.
|
|||||||
On a similar note, SQLite does not generally have a concept of "supported
|
On a similar note, SQLite does not generally have a concept of "supported
|
||||||
release"; bugfixes are published for the latest minor release only. We chose to
|
release"; bugfixes are published for the latest minor release only. We chose to
|
||||||
track Debian's oldstable as this is relatively conservative, predictably updated
|
track Debian's oldstable as this is relatively conservative, predictably updated
|
||||||
and is consistent with the `.deb` packages released by Matrix.org.
|
and is consistent with the `.deb` packages released by Matrix.org.
|
||||||
|
|
||||||
|
|
||||||
## Application dependencies
|
|
||||||
|
|
||||||
For application-level Python dependencies, we often specify loose version constraints
|
|
||||||
(e.g. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
|
|
||||||
are only added when necessary to prevent known incompatibilities.
|
|
||||||
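As a small aside, the semantics of such constraints can be sketched with the `packaging` library (used here purely for illustration):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Loose lower bound, plus an upper bound only because of a (hypothetical)
# known incompatibility with 2.x.
spec = SpecifierSet(">=1.2.0,<2.0.0")

print(Version("1.5.3") in spec)  # True: forwards compatible within 1.x
print(Version("2.0.1") in spec)  # False: excluded by the upper bound
```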
|
|
||||||
When selecting a minimum version, while we are mindful of the impact on downstream
|
|
||||||
package maintainers, our primary focus is on the maintainability and progress of Synapse
|
|
||||||
itself.
|
|
||||||
|
|
||||||
For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
|
|
||||||
available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
|
|
||||||
[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
|
|
||||||
extra scrutiny or consideration at this point.
|
|
||||||
|
|
||||||
We aggressively update Rust dependencies. Since these are statically linked and managed
|
|
||||||
entirely by `cargo` during build, they pose no ongoing maintenance burden on others.
|
|
||||||
This allows us to freely upgrade to leverage the latest ecosystem advancements assuming
|
|
||||||
they don't have their own system-level dependencies.
|
|
||||||
|
|
||||||
|
|
||||||
### Context
|
|
||||||
|
|
||||||
Because Python dependencies can easily be managed in a virtual environment, we are less
|
|
||||||
concerned about the criteria for selecting minimum versions. The only thing of concern
|
|
||||||
is making sure we're not making it unnecessarily difficult for downstream package
|
|
||||||
maintainers. Generally, this just means avoiding the bleeding edge for a few months.
|
|
||||||
|
|
||||||
The situation for Rust dependencies is fundamentally different. For packagers, the
|
|
||||||
concerns around Python dependency versions do not apply. The `cargo` tool handles
|
|
||||||
downloading and building all libraries to satisfy dependencies, and these libraries are
|
|
||||||
statically linked into the final binary. This means that from a packager's perspective,
|
|
||||||
the Rust dependency versions are an internal build detail, not a runtime dependency to
|
|
||||||
be managed on the target system. Consequently, we have even greater flexibility to
|
|
||||||
upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
|
|
||||||
package Rust libraries, but this appears to be the outlier rather than the norm.
|
|
||||||
@@ -29,6 +29,8 @@ easiest way of installing the latest version is to use [rustup](https://rustup.r
|
|||||||
|
|
||||||
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
|
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
|
||||||
|
|
||||||
|
Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
|
||||||
|
|
||||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||||
|
|
||||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||||
@@ -320,7 +322,7 @@ The following command will let you run the integration test with the most common
|
|||||||
configuration:
|
configuration:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
|
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
|
||||||
```
|
```
|
||||||
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
|
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
|
||||||
|
|
||||||
|
|||||||
@@ -79,17 +79,17 @@ phonenumbers = [
|
|||||||
We can see this pinned version inside the docker image for that release:
|
We can see this pinned version inside the docker image for that release:
|
||||||
|
|
||||||
```
|
```
|
||||||
$ docker pull matrixdotorg/synapse:latest
|
$ docker pull vectorim/synapse:v1.97.0
|
||||||
...
|
...
|
||||||
$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers
|
$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers
|
||||||
Name: phonenumbers
|
Name: phonenumbers
|
||||||
Version: 9.0.15
|
Version: 8.12.44
|
||||||
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
|
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
|
||||||
Home-page: https://github.com/daviddrysdale/python-phonenumbers
|
Home-page: https://github.com/daviddrysdale/python-phonenumbers
|
||||||
Author: David Drysdale
|
Author: David Drysdale
|
||||||
Author-email: dmd@lurklurk.org
|
Author-email: dmd@lurklurk.org
|
||||||
License: Apache License 2.0
|
License: Apache License 2.0
|
||||||
Location: /usr/local/lib/python3.12/site-packages
|
Location: /usr/local/lib/python3.9/site-packages
|
||||||
Requires:
|
Requires:
|
||||||
Required-by: matrix-synapse
|
Required-by: matrix-synapse
|
||||||
```
|
```
|
||||||
@@ -164,7 +164,10 @@ $ poetry cache clear --all .
|
|||||||
# including the wheel artifacts which is not covered by the above command
|
# including the wheel artifacts which is not covered by the above command
|
||||||
# (see https://github.com/python-poetry/poetry/issues/10304)
|
# (see https://github.com/python-poetry/poetry/issues/10304)
|
||||||
#
|
#
|
||||||
# This is necessary in order to rebuild or fetch new wheels.
|
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
|
||||||
|
# the `icu` library on your system, you will need to rebuild the PyICU Python package
|
||||||
|
# in order to incorporate the correct dynamically linked library locations, otherwise you
|
||||||
|
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
|
||||||
$ rm -rf $(poetry config cache-dir)
|
$ rm -rf $(poetry config cache-dir)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +0,0 @@
|
|||||||
# Release notes review checklist
|
|
||||||
|
|
||||||
The Synapse release process includes a step to review the changelog before
|
|
||||||
publishing it. The following is a list of common points to check for:
|
|
||||||
|
|
||||||
1. Check whether any similar entries can be merged together (make sure to include all mentioned PRs at the end of the line, e.g. (#1234, #1235, ...)).
|
|
||||||
2. Link any MSCXXXX lines to the Matrix Spec Change itself: <https://github.com/matrix-org/matrix-spec-proposals/pull/xxxx>.
|
|
||||||
3. Wrap any class names, variable names, etc. in back-ticks, if needed.
|
|
||||||
4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release.
|
|
||||||
5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted.
|
|
||||||
6. Quickly skim and check that each entry is in the appropriate section.
|
|
||||||
7. Entries under the Bugfixes section should ideally state what Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or if no version can be identified, "Fixed a long-standing bug ...".
|
|
||||||
@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
|
|||||||
|
|
||||||
**Bad**:
|
**Bad**:
|
||||||
```python
|
```python
|
||||||
cache: ObservableDeferred[None] | None = None
|
cache: Optional[ObservableDeferred[None]] = None
|
||||||
|
|
||||||
async def do_something_else(
|
async def do_something_else(
|
||||||
to_resolve: Deferred[None]
|
to_resolve: Deferred[None]
|
||||||
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
|
|||||||
|
|
||||||
**Good**:
|
**Good**:
|
||||||
```python
|
```python
|
||||||
cache: ObservableDeferred[None] | None = None
|
cache: Optional[ObservableDeferred[None]] = None
|
||||||
|
|
||||||
async def do_something_else(
|
async def do_something_else(
|
||||||
to_resolve: Deferred[None]
|
to_resolve: Deferred[None]
|
||||||
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
|
|||||||
|
|
||||||
**OK**:
|
**OK**:
|
||||||
```python
|
```python
|
||||||
cache: ObservableDeferred[None] | None = None
|
cache: Optional[ObservableDeferred[None]] = None
|
||||||
|
|
||||||
async def do_something_else(
|
async def do_something_else(
|
||||||
to_resolve: Deferred[None]
|
to_resolve: Deferred[None]
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Streams
|
## Streams
|
||||||
|
|
||||||
Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
|
Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
|
||||||
https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
|
https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
|
||||||
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
|
|||||||
https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
|
https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
|
||||||
).
|
).
|
||||||
|
|
||||||
## Definition
|
### Definition
|
||||||
|
|
||||||
A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
|
A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
|
||||||
Only "writers" can add facts to a stream, and there may be multiple writers.
|
Only "writers" can add facts to a stream, and there may be multiple writers.
|
||||||
@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
|
|||||||
Once completed, the rows written with that stream ID are fixed, and no new rows
|
Once completed, the rows written with that stream ID are fixed, and no new rows
|
||||||
will be inserted with that ID.
|
will be inserted with that ID.
|
||||||
|
|
||||||
## Current stream ID
|
### Current stream ID
|
||||||
|
|
||||||
For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
|
For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
|
||||||
|
|
||||||
@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
|
|||||||
| Complete 6 | 6 | |
|
| Complete 6 | 6 | |
|
||||||
|
|
||||||
|
|
||||||
## Multi-writer streams
|
### Multi-writer streams
|
||||||
|
|
||||||
There are two ways to view a multi-writer stream.
|
There are two ways to view a multi-writer stream.
|
||||||
|
|
||||||
@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
|
|||||||
We only ever treat this as multiple single-writer streams as there is no important ordering between cache invalidations.
|
We only ever treat this as multiple single-writer streams as there is no important ordering between cache invalidations.
|
||||||
(Invalidations are self-contained facts, and the invalidations commute/are idempotent.)
|
(Invalidations are self-contained facts, and the invalidations commute/are idempotent.)
|
||||||
|
|
||||||
## Writing to streams
|
### Writing to streams
|
||||||
|
|
||||||
Writers need to track:
|
Writers need to track:
|
||||||
- their current position (i.e. their own per-writer stream ID).
|
- their current position (i.e. their own per-writer stream ID).
|
||||||
@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
|
|||||||
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
|
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
|
||||||
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
|
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
|
||||||
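To make the bookkeeping concrete, here is a minimal single-writer sketch (not Synapse's actual implementation) of tracking in-flight facts and advancing the current position:

```python
class SingleWriterStream:
    """Toy model of the single-writer bookkeeping described above."""

    def __init__(self) -> None:
        self._next_id = 1
        self._in_flight: set[int] = set()  # facts awaiting completion
        self.current_position = 0          # every fact <= this ID is complete

    def allocate(self) -> int:
        stream_id = self._next_id
        self._next_id += 1
        self._in_flight.add(stream_id)
        return stream_id

    def complete(self, stream_id: int) -> list[int]:
        """Mark a fact complete; return the stream IDs to emit RDATA for."""
        self._in_flight.remove(stream_id)
        old_position = self.current_position
        # Advance only up to just before the earliest fact still in flight.
        if self._in_flight:
            self.current_position = min(self._in_flight) - 1
        else:
            self.current_position = self._next_id - 1
        return list(range(old_position + 1, self.current_position + 1))
```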
|
|
||||||
## Subscribing to streams
|
### Subscribing to streams
|
||||||
|
|
||||||
Readers need to track the current position of every writer.
|
Readers need to track the current position of every writer.
|
||||||
|
|
||||||
@@ -146,44 +146,10 @@ The `RDATA` itself is not a self-contained representation of the fact;
|
|||||||
readers will have to query the stream tables for the full details.
|
readers will have to query the stream tables for the full details.
|
||||||
Readers must also advance their record of the writer's current position for that stream.
|
Readers must also advance their record of the writer's current position for that stream.
|
||||||
|
|
||||||
## Summary
|
# Summary
|
||||||
|
|
||||||
In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
|
In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Cheatsheet for creating a new stream
|
|
||||||
|
|
||||||
These rough notes and links may help you to create a new stream and add all the
|
|
||||||
necessary registration and event handling.
|
|
||||||
|
|
||||||
**Create your stream:**
|
|
||||||
- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
|
|
||||||
- will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
|
|
||||||
- may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
|
|
||||||
- if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
|
|
||||||
- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
|
|
||||||
- consider whether it may make sense to introduce a handler
|
|
||||||
|
|
||||||
**Register your stream in:**
|
|
||||||
- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
|
|
||||||
|
|
||||||
**Advance your stream in:**
|
|
||||||
- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
|
|
||||||
- don't forget the super call
|
|
||||||
|
|
||||||
**If you're going to do any caching that needs invalidation from new rows:**
|
|
||||||
- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
|
|
||||||
- don't forget the super call
|
|
||||||
- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
|
|
||||||
|
|
||||||
**For streams to be used in sync:**
|
|
||||||
- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
|
|
||||||
- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
|
|
||||||
- add appropriate wake-up rules
|
|
||||||
- in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
|
|
||||||
- locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
|
|
||||||
- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@@ -59,28 +59,6 @@ def do_request_handling():
|
|||||||
logger.debug("phew")
|
logger.debug("phew")
|
||||||
```
|
```
|
||||||
|
|
||||||
### The `sentinel` context
|
|
||||||
|
|
||||||
The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty
|
|
||||||
sentinel value to represent the root logcontext. This is what is used when there is no
|
|
||||||
other logcontext set. The phrase "clear/reset the logcontext" means to set the current
|
|
||||||
logcontext to the `sentinel` logcontext.
|
|
||||||
|
|
||||||
No CPU/database usage metrics are recorded against the `sentinel` logcontext.
|
|
||||||
|
|
||||||
Ideally, nothing from the Synapse homeserver would be logged against the `sentinel`
|
|
||||||
logcontext as we want to know which server the logs came from. In practice, this is not
|
|
||||||
always the case yet, especially outside of request handling.
|
|
||||||
|
|
||||||
Global things outside of Synapse (e.g. Twisted reactor code) should run in the
|
|
||||||
`sentinel` logcontext. It's only when it calls into application code that a logcontext
|
|
||||||
gets activated. This means the reactor should be started in the `sentinel` logcontext,
|
|
||||||
and any time an awaitable yields control back to the reactor, it should reset the
|
|
||||||
logcontext to be the `sentinel` logcontext. This is important to avoid leaking the
|
|
||||||
current logcontext to the reactor (which would then get picked up and associated with
|
|
||||||
the next thing the reactor does).
|
|
||||||
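As a rough illustration (a sketch assuming the helpers exported by `synapse.logging.context`, not production code):

```python
from synapse.logging.context import (
    SENTINEL_CONTEXT,
    LoggingContext,
    current_context,
)

# Outside of any logcontext we are in the sentinel context.
assert current_context() is SENTINEL_CONTEXT

with LoggingContext("request-1"):
    # Inside the block, logs and metrics are attributed to "request-1".
    assert current_context() is not SENTINEL_CONTEXT

# Exiting the block resets the current logcontext back to the sentinel.
assert current_context() is SENTINEL_CONTEXT
```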
|
|
||||||
|
|
||||||
## Using logcontexts with awaitables
|
## Using logcontexts with awaitables
|
||||||
|
|
||||||
Awaitables break the linear flow of code so that there is no longer a single entry point
|
Awaitables break the linear flow of code so that there is no longer a single entry point
|
||||||
@@ -143,7 +121,8 @@ cares about.
|
|||||||
The following sections describe pitfalls and helpful patterns when
|
The following sections describe pitfalls and helpful patterns when
|
||||||
implementing these rules.
|
implementing these rules.
|
||||||
|
|
||||||
## Always await your awaitables
|
Always await your awaitables
|
||||||
|
----------------------------
|
||||||
|
|
||||||
Whenever you get an awaitable back from a function, you should `await` on
|
Whenever you get an awaitable back from a function, you should `await` on
|
||||||
it as soon as possible. Do not pass go; do not do any logging; do not
|
it as soon as possible. Do not pass go; do not do any logging; do not
|
||||||
@@ -202,171 +181,6 @@ async def sleep(seconds):
|
|||||||
return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
|
return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
|
||||||
```
|
```
|
||||||
|
|
||||||
## Deferred callbacks
|
|
||||||
|
|
||||||
When a deferred callback is called, it inherits the current logcontext. The deferred
|
|
||||||
callback chain can resume a coroutine which, if following our logcontext rules, will
|
|
||||||
restore its own logcontext, then run:
|
|
||||||
|
|
||||||
- until it yields control back to the reactor, setting the sentinel logcontext
|
|
||||||
- or until it finishes, restoring the logcontext it was started with (calling context)
|
|
||||||
|
|
||||||
This behavior creates two specific issues:
|
|
||||||
|
|
||||||
**Issue 1:** The first issue is that the callback may have reset the logcontext to the
|
|
||||||
sentinel before returning. This means our calling function will continue with the
|
|
||||||
sentinel logcontext instead of the logcontext it was started with (bad).
|
|
||||||
|
|
||||||
**Issue 2:** The second issue is that the current logcontext that called the deferred
|
|
||||||
callback could finish before the callback finishes (bad).
|
|
||||||
|
|
||||||
In the following example, the deferred callback is called with the "main" logcontext and
|
|
||||||
runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
|
|
||||||
Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
|
|
||||||
sentinel before yielding control back to the reactor. Our `main` function continues with
|
|
||||||
the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
|
|
||||||
`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
|
|
||||||
control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
|
|
||||||
our `with LoggingContext("competing")` block exits, and restores the previous "main"
|
|
||||||
logcontext which has already finished, resulting in `WARNING: Re-starting finished log
|
|
||||||
context main` and leaking the `main` logcontext into the reactor which will then
|
|
||||||
erroneously be associated with the next task the reactor picks up.
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def competing_callback():
|
|
||||||
# Since this is run with the "main" logcontext, when the "competing"
|
|
||||||
# logcontext exits, it will restore the previous "main" logcontext which has
|
|
||||||
# already finished and results in "WARNING: Re-starting finished log context main"
|
|
||||||
# and leaking the `main` logcontext into the reactor.
|
|
||||||
with LoggingContext("competing"):
|
|
||||||
await clock.sleep(0)
|
|
||||||
|
|
||||||
def main():
|
|
||||||
with LoggingContext("main"):
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
|
|
||||||
# Call the callback within the "main" logcontext.
|
|
||||||
d.callback(None)
|
|
||||||
# Bad: This will be logged against sentinel logcontext
|
|
||||||
logger.debug("ugh")
|
|
||||||
|
|
||||||
main()
|
|
||||||
```
|
|
||||||
|
|
||||||
**Solution 1:** We could of course fix this by following the general rule of "always
|
|
||||||
await your awaitables":
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def main():
|
|
||||||
with LoggingContext("main"):
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
|
|
||||||
d.callback(None)
|
|
||||||
# Wait for `d` to finish before continuing so the "main" logcontext is
|
|
||||||
# still active. This works because `d` already follows our logcontext
|
|
||||||
# rules. If not, we would also have to use `make_deferred_yieldable(d)`.
|
|
||||||
await d
|
|
||||||
# Good: This will be logged against the "main" logcontext
|
|
||||||
logger.debug("phew")
|
|
||||||
```
|
|
||||||
|
|
||||||
**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
|
|
||||||
`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
|
|
||||||
the callback, and restore the "main" logcontext afterwards before continuing the `main`
|
|
||||||
function. This solves the problem because when the "competing" logcontext exits, it will
|
|
||||||
restore the sentinel logcontext which is never finished by its nature, so there is no
|
|
||||||
warning and no leakage into the reactor.
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def main():
|
|
||||||
with LoggingContext("main"):
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
|
|
||||||
with PreserveLoggingContext():
|
|
||||||
# Call the callback with the sentinel logcontext.
|
|
||||||
d.callback(None)
|
|
||||||
# Good: This will be logged against the "main" logcontext
|
|
||||||
logger.debug("phew")
|
|
||||||
```
|
|
||||||
|
|
||||||
**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
|
|
||||||
callback in the current context without running into issues:
|
|
||||||
|
|
||||||
We can solve the first issue by using `run_in_background(...)` to run the callback in
|
|
||||||
the current logcontext; it handles the magic behind the scenes of a) restoring the
|
|
||||||
calling logcontext before returning to the caller and b) resetting the logcontext to the
|
|
||||||
sentinel after the deferred completes and we yield control back to the reactor to avoid
|
|
||||||
leaking the logcontext into the reactor.
|
|
||||||
|
|
||||||
To solve the second issue, we can extend the lifetime of the "main" logcontext by
|
|
||||||
avoiding the `LoggingContext`'s context manager lifetime methods
|
|
||||||
(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
|
|
||||||
`PreserveLoggingContext` and passing in the "main" logcontext.
|
|
||||||
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def main():
|
|
||||||
main_context = LoggingContext("main")
|
|
||||||
with PreserveLoggingContext(main_context):
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
|
|
||||||
# The whole lambda will be run in the "main" logcontext. But we're using
|
|
||||||
# a trick to return the deferred `d` itself so that `run_in_background`
|
|
||||||
# will wait on that to complete and reset the logcontext to the sentinel
|
|
||||||
# when it does to avoid leaking the "main" logcontext into the reactor.
|
|
||||||
run_in_background(lambda: (d.callback(None), d)[1])
|
|
||||||
# Good: This will be logged against the "main" logcontext
|
|
||||||
logger.debug("phew")
|
|
||||||
|
|
||||||
...
|
|
||||||
|
|
||||||
# Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
|
|
||||||
# point. This allows us to catch bugs if we later try to erroneously restart a finished
|
|
||||||
# logcontext.
|
|
||||||
#
|
|
||||||
# Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
|
|
||||||
# created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
|
|
||||||
# previous logcontext. Our goal is to have the calling context remain unchanged after
|
|
||||||
# finishing the "main" logcontext.
|
|
||||||
with PreserveLoggingContext():
|
|
||||||
# Finish the "main" logcontext
|
|
||||||
with main_context:
|
|
||||||
# Empty block - We're just trying to call `__exit__` on the "main" context
|
|
||||||
# manager to finish it. We can't call `__exit__` directly as the code expects us
|
|
||||||
# to `__enter__` before calling `__exit__` to `start`/`stop` things
|
|
||||||
# appropriately. And in any case, it's probably best not to call the internal
|
|
||||||
# methods directly.
|
|
||||||
pass
|
|
||||||
```
|
|
||||||
|
|
||||||
The same thing applies if you have some deferreds stored somewhere which you want to
|
|
||||||
callback in the current logcontext.
|
|
||||||
|
|
||||||
|
|
||||||
### Deferred errbacks and cancellations
|
|
||||||
|
|
||||||
The same care should be taken when calling errbacks on deferreds. An errback and
|
|
||||||
callback act the same in this regard (see section above).
|
|
||||||
|
|
||||||
```python
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addErrback(some_other_function)
|
|
||||||
d.errback(failure)
|
|
||||||
```
|
|
||||||
|
|
||||||
Additionally, cancellation is the same as directly calling the errback with a
|
|
||||||
`twisted.internet.defer.CancelledError`:
|
|
||||||
|
|
||||||
```python
|
|
||||||
d = defer.Deferred()
|
|
||||||
d.addErrback(some_other_function)
|
|
||||||
d.cancel()
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Fire-and-forget
|
## Fire-and-forget
|
||||||
|
|
||||||
Sometimes you want to fire off a chain of execution, but not wait for
|
Sometimes you want to fire off a chain of execution, but not wait for
|
||||||
@@ -548,19 +362,3 @@ chain are dropped. Dropping the reference to an awaitable you're
|
|||||||
supposed to be awaiting is bad practice, so this doesn't
|
supposed to be awaiting is bad practice, so this doesn't
|
||||||
actually happen too much. Unfortunately, when it does happen, it will
|
actually happen too much. Unfortunately, when it does happen, it will
|
||||||
lead to leaked logcontexts which are incredibly hard to track down.
|
lead to leaked logcontexts which are incredibly hard to track down.
|
||||||
|
|
||||||
|
|
||||||
## Debugging logcontext issues
|
|
||||||
|
|
||||||
Debugging logcontext issues can be tricky as leaking or losing a logcontext will surface
|
|
||||||
downstream and can point to an unrelated part of the codebase. It's best to enable debug
|
|
||||||
logging for `synapse.logging.context.debug` (needs to be explicitly configured) and go
|
|
||||||
backwards in the logs from the point where the issue is observed to find the root cause.
|
|
||||||
|
|
||||||
`log.config.yaml`
|
|
||||||
```yaml
|
|
||||||
loggers:
|
|
||||||
# Unlike other loggers, this one needs to be explicitly configured to see debug logs.
|
|
||||||
synapse.logging.context.debug:
|
|
||||||
level: DEBUG
|
|
||||||
```
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ _First introduced in Synapse v1.57.0_
|
|||||||
```python
|
```python
|
||||||
async def on_account_data_updated(
|
async def on_account_data_updated(
|
||||||
user_id: str,
|
user_id: str,
|
||||||
room_id: str | None,
|
room_id: Optional[str],
|
||||||
account_data_type: str,
|
account_data_type: str,
|
||||||
content: "synapse.module_api.JsonDict",
|
content: "synapse.module_api.JsonDict",
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -82,7 +82,7 @@ class CustomAccountDataModule:
|
|||||||
async def log_new_account_data(
|
async def log_new_account_data(
|
||||||
self,
|
self,
|
||||||
user_id: str,
|
user_id: str,
|
||||||
room_id: str | None,
|
room_id: Optional[str],
|
||||||
account_data_type: str,
|
account_data_type: str,
|
||||||
content: JsonDict,
|
content: JsonDict,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ The available account validity callbacks are:
|
|||||||
_First introduced in Synapse v1.39.0_
|
_First introduced in Synapse v1.39.0_
|
||||||
|
|
||||||
```python
|
```python
|
||||||
async def is_user_expired(user: str) -> bool | None
|
async def is_user_expired(user: str) -> Optional[bool]
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when processing any authenticated request (except for logout requests). The module
|
Called when processing any authenticated request (except for logout requests). The module
|
||||||
|
|||||||
@@ -1,131 +0,0 @@
|
|||||||
# Media repository callbacks
|
|
||||||
|
|
||||||
Media repository callbacks allow module developers to customise the behaviour of the
|
|
||||||
media repository on a per user basis. Media repository callbacks can be registered
|
|
||||||
using the module API's `register_media_repository_callbacks` method.
|
|
||||||
|
|
||||||
The available media repository callbacks are:
|
|
||||||
|
|
||||||
### `get_media_config_for_user`
|
|
||||||
|
|
||||||
_First introduced in Synapse v1.132.0_
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def get_media_config_for_user(user_id: str) -> JsonDict | None
|
|
||||||
```
|
|
||||||
|
|
||||||
**<span style="color:red">
|
|
||||||
Caution: This callback is currently experimental. The method signature or behaviour
|
|
||||||
may change without notice.
|
|
||||||
</span>**
|
|
||||||
|
|
||||||
Called when processing a request from a client for the
|
|
||||||
[media config endpoint](https://spec.matrix.org/latest/client-server-api/#get_matrixclientv1mediaconfig).
|
|
||||||
|
|
||||||
The arguments passed to this callback are:
|
|
||||||
|
|
||||||
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
|
|
||||||
|
|
||||||
If the callback returns a dictionary then it will be used as the body of the response to the
|
|
||||||
client.
|
|
||||||
|
|
||||||
If multiple modules implement this callback, they will be considered in order. If a
|
|
||||||
callback returns `None`, Synapse falls through to the next one. The value of the first
|
|
||||||
callback that does not return `None` will be used. If this happens, Synapse will not call
|
|
||||||
any of the subsequent implementations of this callback.
|
|
||||||
|
|
||||||
If no module returns a non-`None` value then the default media config will be returned.
|
|
||||||
|
|
||||||
### `is_user_allowed_to_upload_media_of_size`
|
|
||||||
|
|
||||||
_First introduced in Synapse v1.132.0_
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def is_user_allowed_to_upload_media_of_size(user_id: str, size: int) -> bool
|
|
||||||
```
|
|
||||||
|
|
||||||
**<span style="color:red">
|
|
||||||
Caution: This callback is currently experimental. The method signature or behaviour
|
|
||||||
may change without notice.
|
|
||||||
</span>**
|
|
||||||
|
|
||||||
Called before media is accepted for upload from a user, in case the module needs to
|
|
||||||
enforce a different limit for the particular user.
|
|
||||||
|
|
||||||
The arguments passed to this callback are:
|
|
||||||
|
|
||||||
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
|
|
||||||
* `size`: The size in bytes of media that is being requested to upload.
|
|
||||||
|
|
||||||
If the module returns `False`, the current request will be denied with the error code
|
|
||||||
`M_TOO_LARGE` and the HTTP status code 413.
|
|
||||||
|
|
||||||
If multiple modules implement this callback, they will be considered in order. If a callback
|
|
||||||
returns `True`, Synapse falls through to the next one. The value of the first callback that
|
|
||||||
returns `False` will be used. If this happens, Synapse will not call any of the subsequent
|
|
||||||
implementations of this callback.
|
|
||||||
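For instance, a minimal module sketch (the class name, cap and allow-listed user are illustrative) that registers this callback:

```python
from synapse.module_api import ModuleApi

MAX_UPLOAD_BYTES = 10 * 1024 * 1024  # illustrative 10 MiB cap


class UploadSizeLimiter:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_media_repository_callbacks(
            is_user_allowed_to_upload_media_of_size=self.is_user_allowed_to_upload_media_of_size,
        )

    async def is_user_allowed_to_upload_media_of_size(
        self, user_id: str, size: int
    ) -> bool:
        # Hypothetical allow-list: this user may upload media of any size.
        if user_id == "@trusted:example.com":
            return True
        # True falls through to other modules / the default limits;
        # False rejects the upload with M_TOO_LARGE (HTTP 413).
        return size <= MAX_UPLOAD_BYTES
```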
|
|
||||||
### `get_media_upload_limits_for_user`
|
|
||||||
|
|
||||||
_First introduced in Synapse v1.139.0_
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
|
|
||||||
```
|
|
||||||
|
|
||||||
**<span style="color:red">
|
|
||||||
Caution: This callback is currently experimental. The method signature or behaviour
|
|
||||||
may change without notice.
|
|
||||||
</span>**
|
|
||||||
|
|
||||||
Called when processing a request to store content in the media repository. This can be used to dynamically override
|
|
||||||
the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).
|
|
||||||
|
|
||||||
The arguments passed to this callback are:
|
|
||||||
|
|
||||||
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
|
|
||||||
|
|
||||||
If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).
|
|
||||||
|
|
||||||
If an empty list is returned then no limits are applied (**warning:** users will be able
|
|
||||||
to upload as much data as they desire).
|
|
||||||
|
|
||||||
If multiple modules implement this callback, they will be considered in order. If a
|
|
||||||
callback returns `None`, Synapse falls through to the next one. The value of the first
|
|
||||||
callback that does not return `None` will be used. If this happens, Synapse will not call
|
|
||||||
any of the subsequent implementations of this callback.
|
|
||||||
|
|
||||||
If there are no registered modules, or if all modules return `None`, then
|
|
||||||
the default
|
|
||||||
[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
|
|
||||||
will be used.
|
|
||||||
|
|
||||||
### `on_media_upload_limit_exceeded`
|
|
||||||
|
|
||||||
_First introduced in Synapse v1.139.0_
|
|
||||||
|
|
||||||
```python
|
|
||||||
async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
|
|
||||||
```
|
|
||||||
|
|
||||||
**<span style="color:red">
|
|
||||||
Caution: This callback is currently experimental. The method signature or behaviour
|
|
||||||
may change without notice.
|
|
||||||
</span>**
|
|
||||||
|
|
||||||
Called when a user attempts to upload media that would exceed a
|
|
||||||
[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).
|
|
||||||
|
|
||||||
This callback will only be called on workers which handle
|
|
||||||
[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
|
|
||||||
requests.
|
|
||||||
|
|
||||||
This could be used to inform the user that they have reached a media upload limit through
|
|
||||||
some external method.
|
|
||||||
|
|
||||||
The arguments passed to this callback are:
|
|
||||||
|
|
||||||
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
|
|
||||||
* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
|
|
||||||
* `sent_bytes`: The number of bytes already sent during the period of the limit.
|
|
||||||
* `attempted_bytes`: The number of bytes that the user attempted to send.
|
|
||||||
@@ -23,7 +23,12 @@ async def check_auth(
|
|||||||
user: str,
|
user: str,
|
||||||
login_type: str,
|
login_type: str,
|
||||||
login_dict: "synapse.module_api.JsonDict",
|
login_dict: "synapse.module_api.JsonDict",
|
||||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
|
) -> Optional[
|
||||||
|
Tuple[
|
||||||
|
str,
|
||||||
|
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
|
||||||
|
]
|
||||||
|
]
|
||||||
```
|
```
|
||||||
|
|
||||||
The login type and field names should be provided by the user in the
|
The login type and field names should be provided by the user in the
|
||||||
@@ -62,7 +67,12 @@ async def check_3pid_auth(
|
|||||||
medium: str,
|
medium: str,
|
||||||
address: str,
|
address: str,
|
||||||
password: str,
|
password: str,
|
||||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
|
) -> Optional[
|
||||||
|
Tuple[
|
||||||
|
str,
|
||||||
|
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
|
||||||
|
]
|
||||||
|
]
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when a user attempts to register or log in with a third party identifier,
|
Called when a user attempts to register or log in with a third party identifier,
|
||||||
@@ -88,7 +98,7 @@ _First introduced in Synapse v1.46.0_
|
|||||||
```python
|
```python
|
||||||
async def on_logged_out(
|
async def on_logged_out(
|
||||||
user_id: str,
|
user_id: str,
|
||||||
device_id: str | None,
|
device_id: Optional[str],
|
||||||
access_token: str
|
access_token: str
|
||||||
) -> None
|
) -> None
|
||||||
```
|
```
|
||||||
@@ -109,7 +119,7 @@ _First introduced in Synapse v1.52.0_
|
|||||||
async def get_username_for_registration(
|
async def get_username_for_registration(
|
||||||
uia_results: Dict[str, Any],
|
uia_results: Dict[str, Any],
|
||||||
params: Dict[str, Any],
|
params: Dict[str, Any],
|
||||||
) -> str | None
|
) -> Optional[str]
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when registering a new user. The module can return a username to set for the user
|
Called when registering a new user. The module can return a username to set for the user
|
||||||
@@ -170,7 +180,7 @@ _First introduced in Synapse v1.54.0_
|
|||||||
async def get_displayname_for_registration(
|
async def get_displayname_for_registration(
|
||||||
uia_results: Dict[str, Any],
|
uia_results: Dict[str, Any],
|
||||||
params: Dict[str, Any],
|
params: Dict[str, Any],
|
||||||
) -> str | None
|
) -> Optional[str]
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when registering a new user. The module can return a display name to set for the
|
Called when registering a new user. The module can return a display name to set for the
|
||||||
@@ -249,7 +259,12 @@ class MyAuthProvider:
|
|||||||
username: str,
|
username: str,
|
||||||
login_type: str,
|
login_type: str,
|
||||||
login_dict: "synapse.module_api.JsonDict",
|
login_dict: "synapse.module_api.JsonDict",
|
||||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
|
) -> Optional[
|
||||||
|
Tuple[
|
||||||
|
str,
|
||||||
|
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
|
||||||
|
]
|
||||||
|
]:
|
||||||
if login_type != "my.login_type":
|
if login_type != "my.login_type":
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -261,7 +276,12 @@ class MyAuthProvider:
|
|||||||
username: str,
|
username: str,
|
||||||
login_type: str,
|
login_type: str,
|
||||||
login_dict: "synapse.module_api.JsonDict",
|
login_dict: "synapse.module_api.JsonDict",
|
||||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
|
) -> Optional[
|
||||||
|
Tuple[
|
||||||
|
str,
|
||||||
|
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
|
||||||
|
]
|
||||||
|
]:
|
||||||
if login_type != "m.login.password":
|
if login_type != "m.login.password":
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ _First introduced in Synapse v1.42.0_
 ```python
 async def get_users_for_states(
     state_updates: Iterable["synapse.api.UserPresenceState"],
-) -> dict[str, set["synapse.api.UserPresenceState"]]
+) -> Dict[str, Set["synapse.api.UserPresenceState"]]
 ```
 
 **Requires** `get_interested_users` to also be registered
 
@@ -45,7 +45,7 @@ _First introduced in Synapse v1.42.0_
 ```python
 async def get_interested_users(
     user_id: str
-) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS"
+) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
 ```
 
 **Requires** `get_users_for_states` to also be registered
 
@@ -73,7 +73,7 @@ that `@alice:example.org` receives all presence updates from `@bob:example.com` and
 `@charlie:somewhere.org`, regardless of whether Alice shares a room with any of them.
 
 ```python
-from typing import Iterable
+from typing import Dict, Iterable, Set, Union
 
 from synapse.module_api import ModuleApi
 
@@ -90,7 +90,7 @@ class CustomPresenceRouter:
     async def get_users_for_states(
         self,
         state_updates: Iterable["synapse.api.UserPresenceState"],
-    ) -> dict[str, set["synapse.api.UserPresenceState"]]:
+    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
         res = {}
         for update in state_updates:
             if (
@@ -104,7 +104,7 @@ class CustomPresenceRouter:
     async def get_interested_users(
         self,
         user_id: str,
-    ) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS":
+    ) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
         if user_id == "@alice:example.com":
             return {"@bob:example.com", "@charlie:somewhere.org"}
 
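Because each of these callbacks **requires** the other to be registered, registration always happens in one call. A minimal no-op sketch, with an invented class name, assuming the `register_presence_router_callbacks` method follows the usual module-API registration pattern:

```python
from typing import Dict, Iterable, Set, Union

from synapse.module_api import ModuleApi


class MinimalPresenceRouter:
    """Hypothetical module that routes no extra presence; it only shows that
    both callbacks must be registered together."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_presence_router_callbacks(
            get_users_for_states=self.get_users_for_states,
            get_interested_users=self.get_interested_users,
        )

    async def get_users_for_states(
        self, state_updates: Iterable["synapse.api.UserPresenceState"]
    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
        return {}  # route no additional updates

    async def get_interested_users(
        self, user_id: str
    ) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
        return set()  # no extra interest beyond shared rooms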
@@ -1,43 +0,0 @@
-# Ratelimit callbacks
-
-Ratelimit callbacks allow module developers to override ratelimit settings dynamically whilst
-Synapse is running. Ratelimit callbacks can be registered using the module API's
-`register_ratelimit_callbacks` method.
-
-The available ratelimit callbacks are:
-
-### `get_ratelimit_override_for_user`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> synapse.module_api.RatelimitOverride | None
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when constructing a ratelimiter of a particular type for a user. The module can
-return a `per_second` and `burst_count` to be used, or `None` if
-the default settings are adequate. The user is represented by their Matrix user ID
-(e.g. `@alice:example.com`). The limiter name is usually taken from the `RatelimitSettings` key
-value.
-
-The limiters that are currently supported are:
-
-- `rc_invites.per_room`
-- `rc_invites.per_user`
-- `rc_invites.per_issuer`
-
-The `RatelimitOverride` return type has the following fields:
-
-- `per_second: float`. The number of actions that can be performed in a second. `0.0` means that ratelimiting is disabled.
-- `burst_count: int`. The number of actions that can be performed before being limited.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `None`, Synapse falls through to the next one. The value of the first
-callback that does not return `None` will be used. If this happens, Synapse will not call
-any of the subsequent implementations of this callback. If no module returns a non-`None` value
-then the default settings will be used.
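Since the hunk above deletes the only description of this callback in the diff, a sketch may help: a module that disables invite rate limits for one trusted user. The class name and `trusted_user` config key are invented; `register_ratelimit_callbacks`, `RatelimitOverride`, and the `per_second`/`burst_count` semantics are exactly those named in the deleted text.

```python
from typing import Optional

from synapse.module_api import ModuleApi, RatelimitOverride


class InviteRatelimitOverrider:
    """Hypothetical module lifting invite ratelimits for a trusted bot user."""

    def __init__(self, config: dict, api: ModuleApi):
        self.trusted_user = config.get("trusted_user", "@bot:example.com")
        api.register_ratelimit_callbacks(
            get_ratelimit_override_for_user=self.get_ratelimit_override_for_user,
        )

    async def get_ratelimit_override_for_user(
        self, user: str, limiter_name: str
    ) -> Optional[RatelimitOverride]:
        if user == self.trusted_user and limiter_name.startswith("rc_invites"):
            # per_second=0.0 disables ratelimiting, per the deleted doc above.
            return RatelimitOverride(per_second=0.0, burst_count=0)
        return None  # fall through to other modules / the default settings
```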
@@ -80,8 +80,6 @@ Called when processing an invitation, both when one is created locally or when
 receiving an invite over federation. Both inviter and invitee are represented by
 their Matrix user ID (e.g. `@alice:example.com`).
 
-Note that federated invites will call `federated_user_may_invite` before this callback.
-
 
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
@@ -99,34 +97,6 @@ be used. If this happens, Synapse will not call any of the subsequent implementations of
 this callback.
 
-
-### `federated_user_may_invite`
-
-_First introduced in Synapse v1.133.0_
-
-```python
-async def federated_user_may_invite(event: "synapse.events.EventBase") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
-```
-
-Called when processing an invitation received over federation. Unlike `user_may_invite`,
-this callback receives the entire event, including any stripped state in the `unsigned`
-section, not just the room and user IDs.
-
-The callback must return one of:
-- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
-decide to reject it.
-- `synapse.module_api.errors.Codes` to reject the operation with an error code. In case
-of doubt, `synapse.module_api.errors.Codes.FORBIDDEN` is a good error code.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
-The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
-be used. If this happens, Synapse will not call any of the subsequent implementations of
-this callback.
-
-If all of the callbacks return `synapse.module_api.NOT_SPAM`, Synapse will also fall
-through to the `user_may_invite` callback before approving the invite.
-
 
 ### `user_may_send_3pid_invite`
 
 _First introduced in Synapse v1.45.0_
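The hunk above removes the whole `federated_user_may_invite` section, so a sketch of its use may help readers of this diff. The class name and blocked domain are invented; the registration keyword is assumed to follow the same pattern as the other spam-checker callbacks on this page.

```python
from typing import Union

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class FederatedInviteChecker:
    """Hypothetical module rejecting federated invites from one bad server."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            federated_user_may_invite=self.federated_user_may_invite,
        )

    async def federated_user_may_invite(
        self, event: "synapse.events.EventBase"
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        # The sender's domain is the part after the first colon of the user ID.
        if event.sender.split(":", 1)[1] == "bad.example.org":
            return Codes.FORBIDDEN
        return NOT_SPAM
```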
@@ -189,21 +159,11 @@ _First introduced in Synapse v1.37.0_
 
 _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._
 
-_Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that only expect a single `user_id` argument are still supported._
-
 ```python
-async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
+async def user_may_create_room(user_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```
 
-Called when processing a room creation or room upgrade request.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
-  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
-  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
-  subset of the state of the previous room).
+Called when processing a room creation request.
 
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
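The removed `room_config` bullet is the substance of this hunk: on the newer side of the diff, a module can inspect the `/createRoom` body (or the synthesised body of a room upgrade). A sketch of how that argument might be used, with an invented policy requiring new rooms to enable encryption:

```python
from typing import Union

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class EncryptedRoomGate:
    """Hypothetical module requiring new rooms to enable encryption."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room,
        )

    async def user_may_create_room(
        self, user_id: str, room_config: "synapse.module_api.JsonDict"
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        # Look for an m.room.encryption event in the requested initial state.
        for event in room_config.get("initial_state", []):
            if event.get("type") == "m.room.encryption":
                return NOT_SPAM
        return Codes.FORBIDDEN
```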
@@ -279,41 +239,6 @@ be used. If this happens, Synapse will not call any of the subsequent implementations of
 this callback.
 
-
-### `user_may_send_state_event`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def user_may_send_state_event(user_id: str, room_id: str, event_type: str, state_key: str, content: JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when processing a request to [send state events](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey) to a room.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) sending the state event.
-* `room_id`: The ID of the room that the requested state event is being sent to.
-* `event_type`: The requested type of event.
-* `state_key`: The requested state key.
-* `content`: The requested event contents.
-
-The callback must return one of:
-- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
-decide to reject it.
-- `synapse.module_api.errors.Codes` to reject the operation with an error code. In case
-of doubt, `synapse.module_api.errors.Codes.FORBIDDEN` is a good error code.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
-The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
-be used. If this happens, Synapse will not call any of the subsequent implementations of
-this callback.
-
 
 ### `check_username_for_spam`
 
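Again the deleted section above is the only description of `user_may_send_state_event` in this diff, so a short sketch follows. The module name and the topic-locking policy are invented; the signature and return values are those from the deleted text.

```python
from typing import Union

from synapse.module_api import NOT_SPAM, JsonDict, ModuleApi
from synapse.module_api.errors import Codes


class TopicLockModule:
    """Hypothetical module preventing anyone from changing room topics."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_send_state_event=self.user_may_send_state_event,
        )

    async def user_may_send_state_event(
        self,
        user_id: str,
        room_id: str,
        event_type: str,
        state_key: str,
        content: JsonDict,
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        if event_type == "m.room.topic":
            return Codes.FORBIDDEN
        return NOT_SPAM
```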
@@ -331,9 +256,9 @@ search results; otherwise return `False`.
 The profile is represented as a dictionary with the following keys:
 
 * `user_id: str`. The Matrix ID for this user.
-* `display_name: str | None`. The user's display name, or `None` if this user
+* `display_name: Optional[str]`. The user's display name, or `None` if this user
   has not set a display name.
-* `avatar_url: str | None`. The `mxc://` URL to the user's avatar, or `None`
+* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
   if this user has not set an avatar.
 
 The module is given a copy of the original dictionary, so modifying it from within the
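As a companion to the profile-dictionary description above, a sketch of a `check_username_for_spam` implementation. The class name and `banned_words` config key are invented; `UserProfile` is assumed importable from `synapse.module_api`, matching the dictionary keys listed in the hunk.

```python
from synapse.module_api import ModuleApi, UserProfile


class DisplayNameFilter:
    """Hypothetical module hiding users whose display name contains a banned word."""

    def __init__(self, config: dict, api: ModuleApi):
        self.banned_words = set(config.get("banned_words", []))
        api.register_spam_checker_callbacks(
            check_username_for_spam=self.check_username_for_spam,
        )

    async def check_username_for_spam(self, user_profile: UserProfile) -> bool:
        display_name = (user_profile.get("display_name") or "").lower()
        # Returning True hides the user from directory search results.
        return any(word in display_name for word in self.banned_words)
```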
@@ -352,10 +277,10 @@ _First introduced in Synapse v1.37.0_
 
 ```python
 async def check_registration_for_spam(
-    email_threepid: dict | None,
-    username: str | None,
+    email_threepid: Optional[dict],
+    username: Optional[str],
     request_info: Collection[Tuple[str, str]],
-    auth_provider_id: str | None = None,
+    auth_provider_id: Optional[str] = None,
 ) -> "synapse.spam_checker_api.RegistrationBehaviour"
 ```
 
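A hedged sketch of this callback in use, assuming `RegistrationBehaviour` exposes `ALLOW`, `SHADOW_BAN` and `DENY` members and that `request_info` is a collection of (user agent, IP address) pairs; the class name and the user-agent policy are invented.

```python
from typing import Collection, Optional, Tuple

from synapse.module_api import ModuleApi
from synapse.spam_checker_api import RegistrationBehaviour


class RegistrationScreen:
    """Hypothetical module shadow-banning registrations from one user agent."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_registration_for_spam=self.check_registration_for_spam,
        )

    async def check_registration_for_spam(
        self,
        email_threepid: Optional[dict],
        username: Optional[str],
        request_info: Collection[Tuple[str, str]],
        auth_provider_id: Optional[str] = None,
    ) -> RegistrationBehaviour:
        for user_agent, _ip in request_info:
            if "BadBot" in user_agent:
                return RegistrationBehaviour.SHADOW_BAN
        return RegistrationBehaviour.ALLOW
```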
@@ -428,8 +353,6 @@ callback returns `False`, Synapse falls through to the next one. The value of the first
 callback that does not return `False` will be used. If this happens, Synapse will not call
 any of the subsequent implementations of this callback.
 
-Note that this check is applied to federation invites as of Synapse v1.130.0.
-
 ### `check_login_for_spam`
 
@@ -438,10 +361,10 @@ _First introduced in Synapse v1.87.0_
 ```python
 async def check_login_for_spam(
     user_id: str,
-    device_id: str | None,
-    initial_display_name: str | None,
-    request_info: Collection[tuple[str | None, str]],
-    auth_provider_id: str | None = None,
+    device_id: Optional[str],
+    initial_display_name: Optional[str],
+    request_info: Collection[Tuple[Optional[str], str]],
+    auth_provider_id: Optional[str] = None,
 ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
 ```
 
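For symmetry with the registration sketch above, the same idea applied at login time. All names and the IP-prefix policy are invented; `request_info` is again assumed to hold (user agent, IP address) pairs, per the signature in the hunk.

```python
from typing import Collection, Optional, Tuple, Union

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class LoginScreen:
    """Hypothetical module blocking logins from a blocklisted IP prefix."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam,
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        for _user_agent, ip in request_info:
            if ip.startswith("203.0.113."):  # TEST-NET-3, stand-in for a blocklist
                return Codes.FORBIDDEN
        return NOT_SPAM
```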
@@ -509,7 +432,7 @@ class ListSpamChecker:
       resource=IsUserEvilResource(config),
     )
 
-  async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Literal["NOT_SPAM"] | Codes:
+  async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Union[Literal["NOT_SPAM"], Codes]:
     if event.sender in self.evil_users:
       return Codes.FORBIDDEN
     else:
@@ -16,7 +16,7 @@ _First introduced in Synapse v1.39.0_
 async def check_event_allowed(
     event: "synapse.events.EventBase",
     state_events: "synapse.types.StateMap",
-) -> tuple[bool, dict | None]
+) -> Tuple[bool, Optional[dict]]
 ```
 
 **<span style="color:red">
@@ -340,7 +340,7 @@ class EventCensorer:
         self,
         event: "synapse.events.EventBase",
         state_events: "synapse.types.StateMap",
-    ) -> Tuple[bool, dict | None]:
+    ) -> Tuple[bool, Optional[dict]]:
         event_dict = event.get_dict()
         new_event_content = await self.api.http_client.post_json_get_json(
             uri=self._endpoint, post_json=event_dict,
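The `Tuple[bool, Optional[dict]]` return shape above carries the whole contract of `check_event_allowed`: the boolean allows or rejects the event, and the optional dict replaces its content. A sketch with an invented length-limit policy; only the callback name and registration pattern are taken from this page.

```python
from typing import Optional, Tuple

from synapse.module_api import ModuleApi


class EventLengthLimiter:
    """Hypothetical module rejecting message bodies longer than a configured size."""

    def __init__(self, config: dict, api: ModuleApi):
        self.max_len = config.get("max_len", 4096)  # assumed config key
        api.register_third_party_rules_callbacks(
            check_event_allowed=self.check_event_allowed,
        )

    async def check_event_allowed(
        self,
        event: "synapse.events.EventBase",
        state_events: "synapse.types.StateMap",
    ) -> Tuple[bool, Optional[dict]]:
        body = event.content.get("body", "")
        if event.type == "m.room.message" and len(body) > self.max_len:
            return False, None  # reject the event, don't replace it
        return True, None  # allow the event unchanged
```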
@@ -23,7 +23,6 @@ such as [Github][github-idp].
 [auth0]: https://auth0.com/
 [authentik]: https://goauthentik.io/
 [lemonldap]: https://lemonldap-ng.org/
-[pocket-id]: https://pocket-id.org/
 [okta]: https://www.okta.com/
 [dex-idp]: https://github.com/dexidp/dex
 [keycloak-idp]: https://www.keycloak.org/docs/latest/server_admin/#sso-protocols
@@ -50,11 +49,6 @@ setting in your configuration file.
 See the [configuration manual](usage/configuration/config_documentation.md#oidc_providers) for some sample settings, as well as
 the text below for example configurations for specific providers.
 
-For setups using [`.well-known` delegation](delegate.md), make sure
-[`public_baseurl`](usage/configuration/config_documentation.md#public_baseurl) is set
-appropriately. If unset, Synapse defaults to `https://<server_name>/` which is used in
-the OIDC callback URL.
-
 ## OIDC Back-Channel Logout
 
 Synapse supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) notifications.
@@ -191,7 +185,6 @@ oidc_providers:
 4. Note the slug of your application, Client ID and Client Secret.
 
 Note: RSA keys must be used for signing for Authentik, ECC keys do not work.
-Note: The provider must have a signing key set and must not use an encryption key.
 
 Synapse config:
 ```yaml
@@ -210,12 +203,6 @@ oidc_providers:
     config:
       localpart_template: "{{ user.preferred_username }}"
      display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
-[...]
-jwt_config:
-  enabled: true
-  secret: "your client secret" # TO BE FILLED (same as `client_secret` above)
-  algorithm: "RS256"
-  # (...other fields)
 ```
 
 ### Dex
@@ -637,32 +624,6 @@ oidc_providers:
 
 Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
 
-### Pocket ID
-
-[Pocket ID][pocket-id] is a simple OIDC provider that allows users to authenticate with their passkeys.
-1. Go to `OIDC Clients`
-2. Click on `Add OIDC Client`
-3. Add a name, for example `Synapse`
-4. Add `https://auth.example.org/_synapse/client/oidc/callback` to `Callback URLs` # Replace `auth.example.org` with your domain
-5. Click on `Save`
-6. Note down your `Client ID` and `Client secret`, these will be used later
-
-Synapse config:
-
-```yaml
-oidc_providers:
-  - idp_id: pocket_id
-    idp_name: Pocket ID
-    issuer: "https://auth.example.org/" # Replace with your domain
-    client_id: "your-client-id" # Replace with the "Client ID" you noted down before
-    client_secret: "your-client-secret" # Replace with the "Client secret" you noted down before
-    scopes: ["openid", "profile"]
-    user_mapping_provider:
-      config:
-        localpart_template: "{{ user.preferred_username }}"
-        display_name_template: "{{ user.name }}"
-```
-
 ### Shibboleth with OIDC Plugin
 
 [Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities.
@@ -100,14 +100,6 @@ database:
     keepalives_count: 3
 ```
 
-## Postgresql major version upgrades
-
-Postgres uses separate directories for database locations between major versions (typically `/var/lib/postgresql/<version>/main`).
-
-Therefore, it is recommended to stop Synapse and other services (MAS, etc) before upgrading Postgres major versions.
-
-It is also strongly recommended to [back up](./usage/administration/backups.md#database) your database beforehand to ensure no data loss arising from a failed upgrade.
-
 ## Backups
 
 Don't forget to [back up](./usage/administration/backups.md#database) your database!
@@ -76,7 +76,7 @@ possible.
 #### `get_interested_users`
 
 ```python
-async def get_interested_users(self, user_id: str) -> set[str] | str
+async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
 ```
 
 **Required.** An asynchronous method that is passed a single Matrix User ID. This
@@ -182,7 +182,7 @@ class ExamplePresenceRouter:
     async def get_interested_users(
         self,
         user_id: str,
-    ) -> set[str] | PresenceRouter.ALL_USERS:
+    ) -> Union[Set[str], PresenceRouter.ALL_USERS]:
         """
         Retrieve a list of users that `user_id` is interested in receiving the
         presence of. This will be in addition to those they share a room with.
@@ -5,10 +5,10 @@ It is recommended to put a reverse proxy such as
 [Apache](https://httpd.apache.org/docs/current/mod/mod_proxy_http.html),
 [Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy),
 [HAProxy](https://www.haproxy.org/) or
-[relayd](https://man.openbsd.org/relayd.8) in front of Synapse.
-This has the advantage of being able to expose the default HTTPS port (443) to Matrix
-clients without requiring Synapse to bind to a privileged port (port numbers less than
-1024), avoiding the need for `CAP_NET_BIND_SERVICE` or running as root.
+[relayd](https://man.openbsd.org/relayd.8) in front of Synapse. One advantage
+of doing so is that it means that you can expose the default https port
+(443) to Matrix clients without needing to run Synapse with root
+privileges.
 
 You should configure your reverse proxy to forward requests to `/_matrix` or
 `/_synapse/client` to Synapse, and have it set the `X-Forwarded-For` and
@@ -86,45 +86,6 @@ server {
 }
 ```
 
-### Nginx Proxy Manager or NPMPlus
-
-```nginx
-Add New Proxy-Host
-- Tab Details
-  - Domain Names: matrix.example.com
-  - Scheme: http
-  - Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
-  - Forward Port: 8008
-
-- Tab Custom locations
-  - Add Location
-  - Define Location: /_matrix
-  - Scheme: http
-  - Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
-  - Forward Port: 8008
-  - Click on the gear icon to display a custom configuration field. Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
-  - Enter this in the Custom Field: client_max_body_size 50M;
-
-- Tab SSL/TLS
-  - Choose your SSL/TLS certificate and preferred settings.
-
-- Tab Advanced
-  - Enter this in the Custom Field. This means that port 8448 no longer needs to be opened in your firewall;
-    federation traffic now uses port 443.
-
-    location /.well-known/matrix/server {
-      return 200 '{"m.server": "matrix.example.com:443"}';
-      add_header Content-Type application/json;
-    }
-
-    location /.well-known/matrix/client {
-      return 200 '{"m.homeserver": {"base_url": "https://matrix.example.com"}}';
-      add_header Content-Type application/json;
-      add_header "Access-Control-Allow-Origin" *;
-    }
-
-```
-
 ### Caddy v2
 
 ```
@@ -7,23 +7,8 @@ proxy is supported, not SOCKS proxy or anything else.
 
 ## Configure
 
-The proxy settings can be configured in the homeserver configuration file via
-[`http_proxy`](../usage/configuration/config_documentation.md#http_proxy),
-[`https_proxy`](../usage/configuration/config_documentation.md#https_proxy), and
-[`no_proxy_hosts`](../usage/configuration/config_documentation.md#no_proxy_hosts).
-
-`homeserver.yaml` example:
-```yaml
-http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
-https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
-no_proxy_hosts:
-  - master.hostname.example.com
-  - 10.1.0.0/16
-  - 172.30.0.0/16
-```
-
-The proxy settings can also be configured via the `http_proxy`, `https_proxy`,
-`no_proxy` environment variables. The environment variable is not case sensitive.
+The `http_proxy`, `https_proxy`, `no_proxy` environment variables are used to
+specify proxy settings. The environment variable is not case sensitive.
 - `http_proxy`: Proxy server to use for HTTP requests.
 - `https_proxy`: Proxy server to use for HTTPS requests.
 - `no_proxy`: Comma-separated list of hosts, IP addresses, or IP ranges in CIDR
@@ -59,7 +44,7 @@ The proxy will be **used** for:
 - phone-home stats
 - recaptcha validation
 - CAS auth validation
-- OpenID Connect (OIDC)
+- OpenID Connect
 - Outbound federation
 - Federation (checking public key revocation)
 - Fetching public keys of other servers
@@ -68,7 +53,7 @@ The proxy will be **used** for:
 It will **not be used** for:
 
 - Application Services
-- Matrix Identity servers
+- Identity servers
 - In worker configurations
   - connections between workers
   - connections from workers to Redis
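Not Synapse-specific, but a quick way to sanity-check what these variables resolve to: the Python standard library reads the same (case-insensitive) environment variables. This only illustrates the env-var convention described above; Synapse has its own proxy-handling code, and the values below are invented examples.

```python
import os
import urllib.request

# Assumed example values - replace with your real proxy endpoint.
os.environ["https_proxy"] = "http://USERNAME:PASSWORD@proxy.example.com:8080/"
os.environ["no_proxy"] = "master.hostname.example.com"

# getproxies() picks up http_proxy/https_proxy/no_proxy, upper or lower case.
print(urllib.request.getproxies())
# e.g. {'https': 'http://USERNAME:PASSWORD@proxy.example.com:8080/'}

# Hosts listed in no_proxy bypass the proxy entirely (truthy result).
print(bool(urllib.request.proxy_bypass("master.hostname.example.com")))
```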
@@ -16,15 +16,8 @@ that your email address is probably `user@example.com` rather than
 `user@email.example.com`) - but doing so may require more advanced setup: see
 [Setting up Federation](../federate.md).
 
-⚠️ Before setting up Synapse please consult the [security page](security.md) for
-best practices. ⚠️
-
 ## Installing Synapse
 
-Note: Synapse uses a number of platform dependencies such as Python and PostgreSQL,
-and aims to follow supported upstream versions. See the [deprecation
-policy](../deprecation_policy.md) for more details.
-
 ### Prebuilt packages
 
 Prebuilt packages are available for a number of platforms. These are recommended
@@ -94,13 +87,17 @@ file when you upgrade the Debian package to a later version.
 Andrej Shadura maintains a
 [`matrix-synapse`](https://packages.debian.org/sid/matrix-synapse) package in
 the Debian repositories.
-For `forky` (14) and `sid` (rolling release), it can be installed simply with:
+For `bookworm` and `sid`, it can be installed simply with:
 
 ```sh
 sudo apt install matrix-synapse
 ```
 
-The downstream Debian `matrix-synapse` package is not available for `trixie` (13) and older. Consider using the Matrix.org packages (above).
+Synapse is also available in `bullseye-backports`. Please
+see the [Debian documentation](https://backports.debian.org/Instructions/)
+for information on how to use backports.
+
+`matrix-synapse` is no longer maintained for `buster` and older.
 
 ##### Downstream Ubuntu packages
 
@@ -211,7 +208,7 @@ When following this route please make sure that the [Platform-specific prerequisites] are already installed.
 System requirements:
 
 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.10 or later, up to Python 3.13.
+- Python 3.9 or later, up to Python 3.13.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
 
 If building on an uncommon architecture for which pre-built wheels are
@@ -289,7 +286,7 @@ Installing prerequisites on Ubuntu or Debian:
 ```sh
 sudo apt install build-essential python3-dev libffi-dev \
                      python3-pip python3-setuptools sqlite3 \
-                     libssl-dev virtualenv libjpeg-dev libxslt1-dev
+                     libssl-dev virtualenv libjpeg-dev libxslt1-dev libicu-dev
 ```
 
 ##### ArchLinux
@@ -298,7 +295,7 @@ Installing prerequisites on ArchLinux:
 
 ```sh
 sudo pacman -S base-devel python python-pip \
-               python-setuptools python-virtualenv sqlite3
+               python-setuptools python-virtualenv sqlite3 icu
 ```
 
 ##### CentOS/Fedora
@@ -308,22 +305,18 @@ Installing prerequisites on CentOS or Fedora Linux:
 ```sh
 sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                  libwebp-devel libxml2-devel libxslt-devel libpq-devel \
-                 python3-virtualenv libffi-devel openssl-devel python3-devel
+                 python3-virtualenv libffi-devel openssl-devel python3-devel \
+                 libicu-devel
 sudo dnf group install "Development Tools"
 ```
 
 ##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux
 
-*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux.
-The distributions are 1:1 binary compatible.*
+*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
 
 It's recommended to use the latest Python versions.
 
-RHEL 8 & 9 in particular ship with Python 3.6 & 3.9 respectively by default
-which are EOL and therefore no longer supported by Synapse.
-However, newer Python versions provide significant performance improvements
-and they're available in official distributions' repositories.
-Therefore it's recommended to use them.
+RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
 
 Python 3.11 and 3.12 are available for both RHEL 8 and 9.
 
@@ -340,7 +333,7 @@ dnf install python3.12 python3.12-devel
 ```
 Finally, install common prerequisites
 ```bash
-dnf install libpq5 libpq5-devel lz4 pkgconf
+dnf install libicu libicu-devel libpq5 libpq5-devel lz4 pkgconf
 dnf group install "Development Tools"
 ```
 ###### Using venv module instead of virtualenv command
@@ -372,6 +365,20 @@ xcode-select --install
 
 Some extra dependencies may be needed. You can use Homebrew (https://brew.sh) for them.
 
+You may need to install icu, and make the icu binaries and libraries accessible.
+Please follow [the official instructions of PyICU](https://pypi.org/project/PyICU/) to do so.
+
+If you're struggling to get icu discovered, and see:
+```
+RuntimeError:
+  Please install pkg-config on your system or set the ICU_VERSION environment
+  variable to the version of ICU you have installed.
+```
+despite it being installed and having your `PATH` updated, you can omit this dependency by
+not specifying `--extras all` to `poetry`. If using postgres, you can install Synapse via
+`poetry install --extras saml2 --extras oidc --extras postgres --extras opentracing --extras redis --extras sentry`.
+ICU is not a hard dependency on getting a working installation.
+
 On ARM-based Macs you may also need to install libjpeg and libpq:
 ```sh
 brew install jpeg libpq
@@ -393,7 +400,8 @@ Installing prerequisites on openSUSE:
 ```sh
 sudo zypper in -t pattern devel_basis
 sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
-               python-devel libffi-devel libopenssl-devel libjpeg62-devel
+               python-devel libffi-devel libopenssl-devel libjpeg62-devel \
+               libicu-devel
 ```
 
 ##### OpenBSD
@@ -1,41 +0,0 @@
-# Security
-
-This page lays out security best-practices when running Synapse.
-
-If you believe you have encountered a security issue, see our [Security
-Disclosure Policy](https://element.io/en/security/security-disclosure-policy).
-
-## Content repository
-
-Matrix serves raw, user-supplied data in some APIs — specifically the [content
-repository endpoints](https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid).
-
-Whilst we make a reasonable effort to mitigate against XSS attacks (for
-instance, by using [CSP](https://github.com/matrix-org/synapse/pull/1021)), a
-Matrix homeserver should not be hosted on a domain hosting other web
-applications. This especially applies to sharing the domain with Matrix web
-clients and other sensitive applications like webmail. See
-https://developer.github.com/changes/2014-04-25-user-content-security for more
-information.
-
-Ideally, the homeserver should not simply be on a different subdomain, but on a
-completely different [registered
-domain](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3)
-(also known as top-level site or eTLD+1). This is because [some
-attacks](https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie)
-are still possible as long as the two applications share the same registered
-domain.
-
-
-To illustrate this with an example, if your Element Web or other sensitive web
-application is hosted on `A.example1.com`, you should ideally host Synapse on
-`example2.com`. Some amount of protection is offered by hosting on
-`B.example1.com` instead, so this is also acceptable in some scenarios.
-However, you should *not* host your Synapse on `A.example1.com`.
-
-Note that all of the above refers exclusively to the domain used in Synapse's
-`public_baseurl` setting. In particular, it has no bearing on the domain
-mentioned in MXIDs hosted on that server.
-
-Following this advice ensures that even if an XSS is found in Synapse, the
-impact to other applications will be minimal.
@@ -88,8 +88,7 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=172.16.0.0-172.31.255.255
 
 # recommended additional local peers to block, to mitigate external access to internal services.
-# https://www.enablesecurity.com/blog/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
-# https://www.enablesecurity.com/blog/cve-2020-26262-bypass-of-coturns-access-control-protection/#further-concerns-what-else
+# https://www.rtcsec.com/article/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
 no-multicast-peers
 denied-peer-ip=0.0.0.0-0.255.255.255
 denied-peer-ip=100.64.0.0-100.127.255.255
@@ -102,14 +101,6 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=198.51.100.0-198.51.100.255
 denied-peer-ip=203.0.113.0-203.0.113.255
 denied-peer-ip=240.0.0.0-255.255.255.255
-denied-peer-ip=::1
-denied-peer-ip=64:ff9b::-64:ff9b::ffff:ffff
-denied-peer-ip=::ffff:0.0.0.0-::ffff:255.255.255.255
-denied-peer-ip=100::-100::ffff:ffff:ffff:ffff
-denied-peer-ip=2001::-2001:1ff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=2002::-2002:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fc00::-fdff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fe80::-febf:ffff:ffff:ffff:ffff:ffff:ffff:ffff
 
 # special case the turn server itself so that client->TURN->TURN->client flows work
 # this should be one of the turn server's listening IPs
@@ -63,7 +63,7 @@ class ExampleSpamChecker:
     async def user_may_invite(self, inviter_userid, invitee_userid, room_id):
         return True  # allow all invites
 
-    async def user_may_create_room(self, userid, room_config):
+    async def user_may_create_room(self, userid):
         return True  # allow all room creations
 
     async def user_may_create_room_alias(self, userid, room_alias):
@@ -35,7 +35,7 @@ handlers:
 loggers:
     synapse:
         level: INFO
-        handlers: [file]
+        handlers: [remote]
     synapse.storage.SQL:
         level: WARNING
 ```
Some files were not shown because too many files have changed in this diff.