Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-19 02:20:44 +00:00)

Compare commits: travis/pre...dkasak/par (6 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 1b599ccb73 |  |
|  | 0c67c41b7b |  |
|  | 47a5799b58 |  |
|  | a5f61b044b |  |
|  | fd8c9aa1e2 |  |
|  | d43f6df650 |  |
@@ -1,10 +0,0 @@
-#!/bin/sh
-set -xeu
-
-# On 32-bit Linux platforms, we need libatomic1 to use rustup
-if command -v yum &> /dev/null; then
-    yum install -y libatomic
-fi
-
-# Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal
147 .ci/scripts/auditwheel_wrapper.py Executable file
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2023 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+# Originally licensed under the Apache License, Version 2.0:
+# <http://www.apache.org/licenses/LICENSE-2.0>.
+#
+# [This file includes modifications made by New Vector Limited]
+#
+#
+
+# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
+# compatible wheel, if so rename the wheel before repairing it.
+
+import argparse
+import os
+import subprocess
+from typing import Optional
+from zipfile import ZipFile
+
+from packaging.tags import Tag
+from packaging.utils import parse_wheel_filename
+from packaging.version import Version
+
+
+def check_is_abi3_compatible(wheel_file: str) -> None:
+    """Check the contents of the built wheel for any `.so` files that are *not*
+    abi3 compatible.
+    """
+
+    with ZipFile(wheel_file, "r") as wheel:
+        for file in wheel.namelist():
+            if not file.endswith(".so"):
+                continue
+
+            if not file.endswith(".abi3.so"):
+                raise Exception(f"Found non-abi3 lib: {file}")
+
+
+def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
+    """Replaces the cpython wheel file with a ABI3 compatible wheel"""
+
+    if tag.abi == "abi3":
+        # Nothing to do.
+        return wheel_file
+
+    check_is_abi3_compatible(wheel_file)
+
+    # HACK: it seems that some older versions of pip will consider a wheel marked
+    # as macosx_11_0 as incompatible with Big Sur. I haven't done the full archaeology
+    # here; there are some clues in
+    #     https://github.com/pantsbuild/pants/pull/12857
+    #     https://github.com/pypa/pip/issues/9138
+    #     https://github.com/pypa/packaging/pull/319
+    # Empirically this seems to work, note that macOS 11 and 10.16 are the same,
+    # both versions are valid for backwards compatibility.
+    platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
+    abi3_tag = Tag(tag.interpreter, "abi3", platform)
+
+    dirname = os.path.dirname(wheel_file)
+    new_wheel_file = os.path.join(
+        dirname,
+        f"{name}-{version}-{abi3_tag}.whl",
+    )
+
+    os.rename(wheel_file, new_wheel_file)
+
+    print("Renamed wheel to", new_wheel_file)
+
+    return new_wheel_file
+
+
+def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
+    """Entry point"""
+
+    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
+    # normalizes the package name (i.e. it converts matrix_synapse ->
+    # matrix-synapse), which is not what we want.
+    _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
+    name = os.path.basename(wheel_file).split("-")[0]
+
+    if len(tags) != 1:
+        # We expect only a wheel file with only a single tag
+        raise Exception(f"Unexpectedly found multiple tags: {tags}")
+
+    tag = next(iter(tags))
+
+    if build:
+        # We don't use build tags in Synapse
+        raise Exception(f"Unexpected build tag: {build}")
+
+    # If the wheel is for cpython then convert it into an abi3 wheel.
+    if tag.interpreter.startswith("cp"):
+        wheel_file = cpython(wheel_file, name, version, tag)
+
+    # Finally, repair the wheel.
+    if archs is not None:
+        # If we are given archs then we are on macos and need to use
+        # `delocate-listdeps`.
+        subprocess.run(["delocate-listdeps", wheel_file], check=True)
+        subprocess.run(
+            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
+            check=True,
+        )
+    else:
+        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
+
+    parser.add_argument(
+        "--wheel-dir",
+        "-w",
+        metavar="WHEEL_DIR",
+        help="Directory to store delocated wheels",
+        required=True,
+    )
+
+    parser.add_argument(
+        "--require-archs",
+        metavar="archs",
+        default=None,
+    )
+
+    parser.add_argument(
+        "wheel_file",
+        metavar="WHEEL_FILE",
+    )
+
+    args = parser.parse_args()
+
+    wheel_file = args.wheel_file
+    wheel_dir = args.wheel_dir
+    archs = args.require_archs
+
+    main(wheel_file, wheel_dir, archs)
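
A usage sketch for the wrapper above — the wheel filename and output directory are placeholders, and `auditwheel` (Linux) or `delocate` (macOS) is assumed to be installed; this invocation is not itself part of the diff:

    # Linux: retag an eligible cp3x wheel as abi3, then repair it into ./wheelhouse
    python .ci/scripts/auditwheel_wrapper.py -w ./wheelhouse dist/matrix_synapse-1.2.3-cp310-cp310-linux_x86_64.whl

    # macOS: passing --require-archs switches the repair step to delocate-wheel
    python .ci/scripts/auditwheel_wrapper.py -w ./wheelhouse --require-archs x86_64 dist/matrix_synapse-1.2.3-cp310-cp310-macosx_11_0_x86_64.whl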
@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest and newest Python
-# version that's supported. The oldest version ensures we don't accidentally
-# introduce syntax or code that's too new, and the newest ensures we don't use
-# code that's been dropped in the latest supported Python version.
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.8 right now)
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "sqlite",
-        "extras": "all",
-    },
+    }
 ]
 
 if not IS_PR:
-    # Otherwise, check all supported Python versions.
-    #
-    # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
             "python-version": version,
             "database": "sqlite",
            "extras": "all",
        }
-        for version in ("3.11", "3.12", "3.13")
+        for version in ("3.9", "3.10", "3.11", "3.12")
    )
 
-# Only test postgres against the earliest and latest Python versions that we
-# support in order to save on CI time.
 trial_postgres_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "postgres",
-        "postgres-version": "14",
+        "postgres-version": "11",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "postgres",
-        "postgres-version": "17",
-        "extras": "all",
-    },
+    }
 ]
 
-# Ensure that Synapse passes unit tests even with no extra dependencies installed.
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.12",
+            "database": "postgres",
+            "postgres-version": "16",
+            "extras": "all",
+        }
+    )
+
 trial_no_extra_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "",
     }
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bookworm on PRs
+# For each type of test we only run on focal on PRs
 
 sytest_tests = [
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "multi-postgres",
        "workers": "workers",
    },
    {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
@@ -136,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
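
The job-calculation script above is driven by environment variables. A hedged sketch of exercising it outside CI — the script path and the use of GITHUB_OUTPUT are assumptions (only GITHUB_REF and set_output(...) appear in this diff; GITHUB_OUTPUT is the standard GitHub Actions output mechanism):

    # Pretend to be a PR build and print the computed job matrices to stdout.
    GITHUB_REF="refs/pull/123/merge" GITHUB_OUTPUT=/dev/stdout python .ci/calculate_jobs.py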
@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
 
 try:
     lock_version = lockfile["metadata"]["lock-version"]
-    assert lock_version == "2.1"
+    assert lock_version == "2.0"
 except Exception:
     print(
         """\
-Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
-and re-run `poetry lock`. See the Poetry cheat sheet at
+Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
+and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
 https://element-hq.github.io/synapse/develop/development/dependencies.html
 """
     )
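
To check locally which lockfile format you have (the assert above enforces it), a quick grep is enough; roughly, Poetry 2.x writes lock-version 2.1 while the 1.x series writes 2.0 — that mapping is stated here as background, not taken from this diff:

    grep -m1 'lock-version' poetry.lock
    # e.g.: lock-version = "2.1"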
36 .ci/scripts/prepare_old_deps.sh Executable file
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# this script is run by GitHub Actions in a plain `focal` container; it
+# - installs the minimal system requirements, and poetry;
+# - patches the project definition file to refer to old versions only;
+# - creates a venv with these old versions using poetry; and finally
+# - invokes `trial` to run the tests with old deps.
+
+set -ex
+
+# Prevent virtualenv from auto-updating pip to an incompatible version
+export VIRTUALENV_NO_DOWNLOAD=1
+
+# TODO: in the future, we could use an implementation of
+# https://github.com/python-poetry/poetry/issues/3527
+# https://github.com/pypa/pip/issues/8085
+# to select the lowest possible versions, rather than resorting to this sed script.
+
+# Patch the project definitions in-place:
+# - Replace all lower and tilde bounds with exact bounds
+# - Replace all caret bounds---but not the one that defines the supported Python version!
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
+# - Use pyopenssl 17.0, which is the oldest version that works with
+#   a `cryptography` compiled against OpenSSL 1.1.
+# - Omit systemd: we're not logging to journal here.
+
+sed -i \
+  -e "s/[~>]=/==/g" \
+  -e '/^python = "^/!s/\^/==/g' \
+  -e "/psycopg2/d" \
+  -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+  -e '/systemd/d' \
+  pyproject.toml
+
+echo "::group::Patched pyproject.toml"
+cat pyproject.toml
+echo "::endgroup::"
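
To illustrate what the sed patch above does, here is the effect of the two bound-rewriting expressions on made-up dependency lines (the package names and versions are examples, not taken from Synapse's pyproject.toml):

    $ printf 'attrs = ">=19.2.0"\nPillow = "^10.0.1"\npython = "^3.8.0"\n' \
        | sed -e "s/[~>]=/==/g" -e '/^python = "^/!s/\^/==/g'
    attrs = "==19.2.0"
    Pillow = "==10.0.1"
    python = "^3.8.0"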
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# 1) Resolve project ID.
-PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
-
-# 2) Find existing item (project card) for this issue.
-ITEM_ID=$(
-  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
-    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
-)
-
-# 3) If one doesn't exist, add this issue to the project.
-if [ -z "${ITEM_ID:-}" ]; then
-  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
-fi
-
-# 4) Get Status field id + the option id for TARGET_STATUS.
-FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
-STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
-STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
-OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
-
-if [ -z "${OPTION_ID:-}" ]; then
-  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
-fi
-
-# 5) Set Status (moves item to the matching column in the board view).
-gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
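
The removed board-automation script above is configured purely through environment variables (PROJECT_NUMBER, PROJECT_OWNER, ISSUE_URL, TARGET_STATUS). A hedged sketch of how it would have been invoked — the values and the script path below are placeholders, and the GitHub CLI (`gh`) plus `jq` must be installed and authenticated:

    PROJECT_NUMBER=1 \
    PROJECT_OWNER=element-hq \
    ISSUE_URL=https://github.com/element-hq/synapse/issues/12345 \
    TARGET_STATUS="In Progress" \
    bash .github/scripts/move-issue-on-board.sh   # hypothetical path; not named in this diff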
@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11
 
-# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
-fc244bb592aa481faf28214a2e2ce3bb4e95d990
-
-# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
-fcac7e0282b074d4bd3414d1c9c181e9701875d9
2 .github/ISSUE_TEMPLATE.md vendored

@@ -2,4 +2,4 @@
 (using a matrix.org account if necessary). We do not use GitHub issues for
 support.
 
-**If you want to report a security issue** please see https://element.io/security/security-disclosure-policy
+**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
2 .github/ISSUE_TEMPLATE/BUG_REPORT.yml vendored

@@ -7,7 +7,7 @@ body:
 **THIS IS NOT A SUPPORT CHANNEL!**
 **IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
 
-If you want to report a security issue, please see https://element.io/security/security-disclosure-policy
+If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
 
 This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.
 
3 .github/PULL_REQUEST_TEMPLATE.md vendored

@@ -9,4 +9,5 @@
 - End with either a period (.) or an exclamation mark (!).
 - Start with a capital letter.
 - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
+  (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
69 .github/dependabot.yml vendored

@@ -1,92 +1,23 @@
 version: 2
-# As dependabot is currently only run on a weekly basis, we raise the
-# open-pull-requests-limit to 10 (from the default of 5) to better ensure we
-# don't continuously grow a backlog of updates.
 updates:
   - # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
     package-ecosystem: "pip"
     directory: "/"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # Group patch updates to packages together into a single PR, as they rarely
-    # if ever contain breaking changes that need to be reviewed separately.
-    #
-    # Less PRs means a streamlined review process.
-    #
-    # Python packages follow semantic versioning, and tend to only introduce
-    # breaking changes in major version bumps. Thus, we'll group minor and patch
-    # versions together.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    # Prevent pulling packages that were recently updated to help mitigate
-    # supply chain attacks. 14 days was taken from the recommendation at
-    # https://blog.yossarian.net/2025/11/21/We-should-all-be-using-dependency-cooldowns
-    # where the author noted that 9/10 attacks would have been mitigated by a
-    # two week cooldown.
-    #
-    # The cooldown only applies to general updates; security updates will still
-    # be pulled in as soon as possible.
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "docker"
     directory: "/docker"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # For container versions, breaking changes are also typically only introduced in major
-    # package bumps.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "github-actions"
     directory: "/"
-    open-pull-requests-limit: 10
     schedule:
       interval: "weekly"
-    # Similarly for GitHub Actions, breaking changes are typically only introduced in major
-    # package bumps.
-    groups:
-      minor-and-patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "minor"
-          - "patch"
-    cooldown:
-      default-days: 14
 
   - package-ecosystem: "cargo"
     directory: "/"
-    open-pull-requests-limit: 10
     versioning-strategy: "lockfile-only"
     schedule:
       interval: "weekly"
-    # The Rust ecosystem is special in that breaking changes are often introduced
-    # in minor version bumps, as packages typically stay pre-1.0 for a long time.
-    # Thus we specifically keep minor version bumps separate in their own PRs.
-    groups:
-      patches:
-        applies-to: version-updates
-        patterns:
-          - "*"
-        update-types:
-          - "patch"
-    cooldown:
-      default-days: 14
148 .github/workflows/docker.yml vendored

@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [master, main, develop]
+    branches: [ master, main, develop ]
   workflow_dispatch:
 
 permissions:
@@ -14,24 +14,26 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token
 jobs:
   build:
-    name: Build and push image for ${{ matrix.platform }}
-    runs-on: ${{ matrix.runs_on }}
-    strategy:
-      matrix:
-        include:
-          - platform: linux/amd64
-            runs_on: ubuntu-24.04
-            suffix: linux-amd64
-          - platform: linux/arm64
-            runs_on: ubuntu-24.04-arm
-            suffix: linux-arm64
+    runs-on: ubuntu-latest
     steps:
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
 
+      - name: Inspect builder
+        run: docker buildx inspect
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.5.0
+
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@v4
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,91 +43,25 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push by digest
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
-          file: "docker/Dockerfile"
-          platforms: ${{ matrix.platform }}
-          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
-
-      - name: Export digest
-        run: |
-          mkdir -p ${{ runner.temp }}/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-      - name: Upload digest
-        uses: actions/upload-artifact@v5
-        with:
-          name: digests-${{ matrix.suffix }}
-          path: ${{ runner.temp }}/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  merge:
-    name: Push merged images to ${{ matrix.repository }}
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        repository:
-          - docker.io/matrixdotorg/synapse
-          - ghcr.io/element-hq/synapse
-
-    needs:
-      - build
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'docker.io') }}
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
-
       - name: Calculate docker image tag
-        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
+        id: set-tag
+        uses: docker/metadata-action@master
         with:
-          images: ${{ matrix.repository }}
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/element-hq/synapse
          flavor: |
            latest=false
          tags: |
@@ -133,23 +69,31 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
-            type=sha
 
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        env:
-          REPOSITORY: ${{ matrix.repository }}
-        run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$REPOSITORY@sha256:%s " *)
-
-      - name: Sign each manifest
+      - name: Build and push all platforms
+        id: build-and-push
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: "${{ steps.set-tag.outputs.tags }}"
+          file: "docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64
+
+          # arm64 builds OOM without the git fetch setting. c.f.
+          # https://github.com/rust-lang/cargo/issues/10583
+          build-args: |
+            CARGO_NET_GIT_FETCH_WITH_CLI=true
+
+      - name: Sign the images with GitHub OIDC Token
         env:
-          REPOSITORY: ${{ matrix.repository }}
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.set-tag.outputs.tags }}
         run: |
-          DIGESTS=""
-          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
-            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
-            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
          done
-          cosign sign --yes $DIGESTS
+          cosign sign --yes ${images}
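
Both sides of this workflow sign the published images with cosign using GitHub's OIDC token (keyless signing). As a hedged illustration of how a consumer could verify such a signature with cosign 2.x — the identity regexp and tag below are placeholders, not taken from this workflow:

    cosign verify \
      --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
      --certificate-identity-regexp="^https://github.com/element-hq/synapse/" \
      ghcr.io/element-hq/synapse:latest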
34 .github/workflows/docs-pr-netlify.yaml vendored Normal file

@@ -0,0 +1,34 @@
+name: Deploy documentation PR preview
+
+on:
+  workflow_run:
+    workflows: [ "Prepare documentation PR preview" ]
+    types:
+      - completed
+
+jobs:
+  netlify:
+    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
+    runs-on: ubuntu-latest
+    steps:
+      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
+      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
+      - name: 📥 Download artifact
+        uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
+        with:
+          workflow: docs-pr.yaml
+          run_id: ${{ github.event.workflow_run.id }}
+          name: book
+          path: book
+
+      - name: 📤 Deploy to Netlify
+        uses: matrix-org/netlify-pr-preview@v3
+        with:
+          path: book
+          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
+          branch: ${{ github.event.workflow_run.head_branch }}
+          revision: ${{ github.event.workflow_run.head_sha }}
+          token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+          site_id: ${{ secrets.NETLIFY_SITE_ID }}
+          desc: Documentation preview
+          deployment_env: PR Documentation Preview
8 .github/workflows/docs-pr.yaml vendored

@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
 
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
        with:
          name: book
          path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
16 .github/workflows/docs.yaml vendored

@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
 
@@ -78,18 +78,6 @@ jobs:
           mdbook build
           cp book/welcome_and_overview.html book/index.html
 
-      - name: Prepare and publish schema files
-        run: |
-          sudo apt-get update && sudo apt-get install -y yq
-          mkdir -p book/schema
-          # Remove developer notice before publishing.
-          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
-          # Copy schema files that are independent from current Synapse version.
-          cp -r -t book/schema schema/v*/
-          # Convert config schema from YAML source file to JSON.
-          yq < schema/synapse-config.schema.yaml \
-             > book/schema/synapse-config.schema.json
-
       # Deploy to the target directory.
       - name: Deploy to gh pages
         uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
34 .github/workflows/fix_lint.yaml vendored

@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:
 
-env:
-  # We use nightly so that `fmt` correctly groups together imports, and
-  # clippy correctly fixes up the benchmarks.
-  RUST_VERSION: nightly-2025-06-24
-
 jobs:
   fixup:
     name: Fix up
@@ -18,28 +13,33 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          toolchain: ${{ env.RUST_VERSION }}
-          components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+          # We use nightly so that `fmt` correctly groups together imports, and
+          # clippy correctly fixes up the benchmarks.
+          toolchain: nightly-2022-12-01
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           install-project: "false"
-          poetry-version: "2.1.1"
 
-      - name: Run ruff check
+      - name: Import order (isort)
         continue-on-error: true
-        run: poetry run ruff check --fix .
+        run: poetry run isort .
 
-      - name: Run ruff format
+      - name: Code style (black)
         continue-on-error: true
-        run: poetry run ruff format --quiet .
+        run: poetry run black .
+
+      - name: Semantic checks (ruff)
+        continue-on-error: true
+        run: poetry run ruff --fix .
 
       - run: cargo clippy --all-features --fix -- -D warnings
         continue-on-error: true
@@ -47,6 +47,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      - uses: stefanzweifel/git-auto-commit-action@v5
         with:
           commit_message: "Attempt to fix linting"
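
The same fix-ups can be run locally; this sketch simply strings together the ruff/cargo commands that appear on the pinned-actions side of this workflow (poetry, ruff and a Rust toolchain are assumed to be installed):

    poetry run ruff check --fix .
    poetry run ruff format --quiet .
    cargo clippy --all-features --fix -- -D warnings
    cargo fmt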
49 .github/workflows/latest_deps.yml vendored

@@ -21,9 +21,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,25 +39,23 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --without dev
+      - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +72,11 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
           -e POSTGRES_PASSWORD=postgres \
           -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
           postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
@@ -139,9 +132,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bookworm
+          - sytest-tag: focal
 
-          - sytest-tag: bookworm
+          - sytest-tag: focal
             postgres: postgres
            workers: workers
            redis: redis
@@ -152,13 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -201,15 +192,15 @@ jobs:
         database: Postgres
 
     steps:
-      - name: Check out synapse codebase
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      - uses: actions/setup-go@v5
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
4 .github/workflows/poetry_lockfile.yaml vendored

@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: '3.x'
      - run: pip install tomli
10 .github/workflows/push_complement_image.yml vendored

@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
+        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}/complement-synapse
          tags: |
116
.github/workflows/release-artifacts.yml
vendored
116
.github/workflows/release-artifacts.yml
vendored
@@ -5,7 +5,7 @@ name: Build release artifacts
|
|||||||
on:
|
on:
|
||||||
# we build on PRs and develop to (hopefully) get early warning
|
# we build on PRs and develop to (hopefully) get early warning
|
||||||
# of things breaking (but only build one set of debs). PRs skip
|
# of things breaking (but only build one set of debs). PRs skip
|
||||||
# building wheels on ARM.
|
# building wheels on macOS & ARM.
|
||||||
pull_request:
|
pull_request:
|
||||||
push:
|
push:
|
||||||
branches: ["develop", "release-*"]
|
branches: ["develop", "release-*"]
|
||||||
@@ -27,10 +27,10 @@ jobs:
|
|||||||
name: "Calculate list of debian distros"
|
name: "Calculate list of debian distros"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: '3.x'
|
||||||
- id: set-distros
|
- id: set-distros
|
||||||
run: |
|
run: |
|
||||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
||||||
@@ -55,18 +55,18 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: src
|
path: src
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
install: true
|
install: true
|
||||||
|
|
||||||
- name: Set up docker layer caching
|
- name: Set up docker layer caching
|
||||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: /tmp/.buildx-cache
|
path: /tmp/.buildx-cache
|
||||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||||
@@ -74,9 +74,9 @@ jobs:
|
|||||||
${{ runner.os }}-buildx-
|
${{ runner.os }}-buildx-
|
||||||
|
|
||||||
- name: Set up python
|
- name: Set up python
|
||||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: '3.x'
|
||||||
|
|
||||||
- name: Build the packages
|
- name: Build the packages
|
||||||
# see https://github.com/docker/build-push-action/issues/252
|
# see https://github.com/docker/build-push-action/issues/252
|
||||||
@@ -91,70 +91,74 @@ jobs:
|
|||||||
rm -rf /tmp/.buildx-cache
|
rm -rf /tmp/.buildx-cache
|
||||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
||||||
|
|
||||||
- name: Artifact name
|
|
||||||
id: artifact-name
|
|
||||||
# We can't have colons in the upload name of the artifact, so we convert
|
|
||||||
# e.g. `debian:sid` to `sid`.
|
|
||||||
env:
|
|
||||||
DISTRO: ${{ matrix.distro }}
|
|
||||||
run: |
|
|
||||||
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Upload debs as artifacts
|
- name: Upload debs as artifacts
|
||||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
with:
|
with:
|
||||||
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
|
name: debs
|
||||||
path: debs/*
|
path: debs/*
|
||||||
|
|
||||||
build-wheels:
|
build-wheels:
|
||||||
name: Build wheels on ${{ matrix.os }}
|
name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os:
|
os: [ubuntu-20.04, macos-12]
|
||||||
- ubuntu-24.04
|
arch: [x86_64, aarch64]
|
||||||
- ubuntu-24.04-arm
|
|
||||||
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
||||||
# It is not read by the rest of the workflow.
|
# It is not read by the rest of the workflow.
|
||||||
is_pr:
|
is_pr:
|
||||||
- ${{ startsWith(github.ref, 'refs/pull/') }}
|
- ${{ startsWith(github.ref, 'refs/pull/') }}
|
||||||
|
|
||||||
exclude:
|
exclude:
|
||||||
|
# Don't build macos wheels on PR CI.
|
||||||
|
- is_pr: true
|
||||||
|
os: "macos-12"
|
||||||
|
# Don't build aarch64 wheels on mac.
|
||||||
|
- os: "macos-12"
|
||||||
|
arch: aarch64
|
||||||
# Don't build aarch64 wheels on PR CI.
|
# Don't build aarch64 wheels on PR CI.
|
||||||
- is_pr: true
|
- is_pr: true
|
||||||
os: "ubuntu-24.04-arm"
|
arch: aarch64
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
||||||
# here, because `python` on osx points to Python 2.7.
|
# here, because `python` on osx points to Python 2.7.
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
|
|
||||||
- name: Install cibuildwheel
|
- name: Install cibuildwheel
|
||||||
run: python -m pip install cibuildwheel==3.2.1
|
run: python -m pip install cibuildwheel==2.19.1
|
||||||
|
|
||||||
|
- name: Set up QEMU to emulate aarch64
|
||||||
|
if: matrix.arch == 'aarch64'
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
with:
|
||||||
|
platforms: arm64
|
||||||
|
|
||||||
|
- name: Build aarch64 wheels
|
||||||
|
if: matrix.arch == 'aarch64'
|
||||||
|
run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Only build a single wheel on PR
|
- name: Only build a single wheel on PR
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
|
run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Build wheels
|
- name: Build wheels
|
||||||
run: python -m cibuildwheel --output-dir wheelhouse
|
run: python -m cibuildwheel --output-dir wheelhouse
|
||||||
env:
|
env:
|
||||||
# The platforms that we build for are determined by the
|
# Skip testing for platforms which various libraries don't have wheels
|
||||||
# `tool.cibuildwheel.skip` option in `pyproject.toml`.
|
# for, and so need extra build deps.
|
||||||
|
CIBW_TEST_SKIP: pp3*-* *i686* *musl*
|
||||||
|
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
|
||||||
|
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||||
|
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
||||||
|
|
||||||
# We skip testing wheels for the following platforms in CI:
|
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
#
|
|
||||||
# pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
|
|
||||||
# musl: (TODO: investigate).
|
|
||||||
CIBW_TEST_SKIP: pp3*-* *musl*
|
|
||||||
|
|
||||||
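Taken together, the wheel-build settings above split the configuration between `pyproject.toml` (the `tool.cibuildwheel.skip` list) and the job environment (`CIBW_TEST_SKIP`, plus `CIBW_BUILD` on the PR fast path). As a rough, hedged sketch only, the same build could be reproduced locally with cibuildwheel's documented environment variables, using the version and values shown in the steps above:

    # Build wheels locally, mirroring the CI environment above.
    pip install cibuildwheel==3.2.1
    # Skip testing PyPy and musl wheels, as the job env does.
    export CIBW_TEST_SKIP='pp3*-* *musl*'
    # Optionally restrict to the single wheel the PR fast path builds.
    export CIBW_BUILD='cp310-manylinux_*'
    python -m cibuildwheel --output-dir wheelhouse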
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
|
||||||
with:
|
with:
|
||||||
name: Wheel-${{ matrix.os }}
|
name: Wheel
|
||||||
path: ./wheelhouse/*.whl
|
path: ./wheelhouse/*.whl
|
||||||
|
|
||||||
build-sdist:
|
build-sdist:
|
||||||
@@ -163,21 +167,22 @@ jobs:
|
|||||||
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: '3.10'
|
||||||
|
|
||||||
- run: pip install build
|
- run: pip install build
|
||||||
|
|
||||||
- name: Build sdist
|
- name: Build sdist
|
||||||
run: python -m build --sdist
|
run: python -m build --sdist
|
||||||
|
|
||||||
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
with:
|
with:
|
||||||
name: Sdist
|
name: Sdist
|
||||||
path: dist/*.tar.gz
|
path: dist/*.tar.gz
|
||||||
|
|
||||||
|
|
||||||
# if it's a tag, create a release and attach the artifacts to it
|
# if it's a tag, create a release and attach the artifacts to it
|
||||||
attach-assets:
|
attach-assets:
|
||||||
name: "Attach assets to release"
|
name: "Attach assets to release"
|
||||||
@@ -189,20 +194,19 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Download all workflow run artifacts
|
- name: Download all workflow run artifacts
|
||||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
|
||||||
- name: Build a tarball for the debs
|
- name: Build a tarball for the debs
|
||||||
# We need to merge all the debs uploads into one folder, then compress
|
run: tar -cvJf debs.tar.xz debs
|
||||||
# that.
|
|
||||||
run: |
|
|
||||||
mkdir debs
|
|
||||||
mv debs*/* debs/
|
|
||||||
tar -cvJf debs.tar.xz debs
|
|
||||||
- name: Attach to release
|
- name: Attach to release
|
||||||
|
uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
with:
|
||||||
gh release upload "${{ github.ref_name }}" \
|
files: |
|
||||||
Sdist/* \
|
Sdist/*
|
||||||
Wheel*/* \
|
Wheel/*
|
||||||
debs.tar.xz \
|
debs.tar.xz
|
||||||
--repo ${{ github.repository }}
|
# if it's not already published, keep the release as a draft.
|
||||||
|
draft: true
|
||||||
|
# mark it as a prerelease if the tag contains 'rc'.
|
||||||
|
prerelease: ${{ contains(github.ref, 'rc') }}
|
||||||
|
|||||||
.github/workflows/schema.yaml (vendored, 57 lines changed)
@@ -1,57 +0,0 @@
|
|||||||
name: Schema
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- schema/**
|
|
||||||
- docs/usage/configuration/config_documentation.md
|
|
||||||
push:
|
|
||||||
branches: ["develop", "release-*"]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
validate-schema:
|
|
||||||
name: Ensure Synapse config schema is valid
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- name: Install check-jsonschema
|
|
||||||
run: pip install check-jsonschema==0.33.0
|
|
||||||
|
|
||||||
- name: Validate meta schema
|
|
||||||
run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
|
|
||||||
- name: Validate schema
|
|
||||||
run: |-
|
|
||||||
# Please bump on introduction of a new meta schema.
|
|
||||||
LATEST_META_SCHEMA_VERSION=v1
|
|
||||||
check-jsonschema \
|
|
||||||
--schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
|
|
||||||
schema/synapse-config.schema.yaml
|
|
||||||
- name: Validate default config
|
|
||||||
# Populates the empty instance with default values and checks against the schema.
|
|
||||||
run: |-
|
|
||||||
echo "{}" | check-jsonschema \
|
|
||||||
--fill-defaults --schemafile=schema/synapse-config.schema.yaml -
|
|
||||||
|
|
||||||
check-doc-generation:
|
|
||||||
name: Ensure generated documentation is up-to-date
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- name: Install PyYAML
|
|
||||||
run: pip install PyYAML==6.0.2
|
|
||||||
|
|
||||||
- name: Regenerate config documentation
|
|
||||||
run: |
|
|
||||||
scripts-dev/gen_config_documentation.py \
|
|
||||||
schema/synapse-config.schema.yaml \
|
|
||||||
> docs/usage/configuration/config_documentation.md
|
|
||||||
- name: Error in case of any differences
|
|
||||||
# Errors if there are now any modified files (untracked files are ignored).
|
|
||||||
run: 'git diff --exit-code'
|
|
||||||
.github/workflows/tests.yml (vendored, 287 lines changed)
@@ -11,9 +11,6 @@ concurrency:
|
|||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
env:
|
|
||||||
RUST_VERSION: 1.87.0
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
||||||
# don't modify Rust code.
|
# don't modify Rust code.
|
||||||
@@ -24,9 +21,8 @@ jobs:
|
|||||||
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
|
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
|
||||||
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
|
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
|
||||||
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
||||||
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
- uses: dorny/paths-filter@v3
|
||||||
id: filter
|
id: filter
|
||||||
# We only check on PRs
|
# We only check on PRs
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
@@ -77,25 +73,20 @@ jobs:
|
|||||||
- 'poetry.lock'
|
- 'poetry.lock'
|
||||||
- '.github/workflows/tests.yml'
|
- '.github/workflows/tests.yml'
|
||||||
|
|
||||||
linting_readme:
|
|
||||||
- 'README.rst'
|
|
||||||
|
|
||||||
check-sampleconfig:
|
check-sampleconfig:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
needs: changes
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||||
- run: poetry run scripts-dev/config-lint.sh
|
- run: poetry run scripts-dev/config-lint.sh
|
||||||
@@ -106,18 +97,18 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
|
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||||
|
|
||||||
check-lockfile:
|
check-lockfile:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: .ci/scripts/check_lockfile.py
|
- run: .ci/scripts/check_lockfile.py
|
||||||
@@ -129,19 +120,22 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
|
||||||
install-project: "false"
|
install-project: "false"
|
||||||
|
|
||||||
- name: Run ruff check
|
- name: Import order (isort)
|
||||||
run: poetry run ruff check --output-format=github .
|
run: poetry run isort --check --diff .
|
||||||
|
|
||||||
- name: Run ruff format
|
- name: Code style (black)
|
||||||
run: poetry run ruff format --check .
|
run: poetry run black --check --diff .
|
||||||
|
|
||||||
|
- name: Semantic checks (ruff)
|
||||||
|
# --quiet suppresses the update check.
|
||||||
|
run: poetry run ruff --quiet .
|
||||||
|
|
||||||
lint-mypy:
|
lint-mypy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -151,16 +145,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
# We want to make use of type hints in optional dependencies too.
|
# We want to make use of type hints in optional dependencies too.
|
||||||
extras: all
|
extras: all
|
||||||
@@ -169,12 +161,11 @@ jobs:
|
|||||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
||||||
# To make CI green, err towards caution and install the project.
|
# To make CI green, err towards caution and install the project.
|
||||||
install-project: "true"
|
install-project: "true"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
|
|
||||||
# Cribbed from
|
# Cribbed from
|
||||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
||||||
- name: Restore/persist mypy's cache
|
- name: Restore/persist mypy's cache
|
||||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
.mypy_cache
|
.mypy_cache
|
||||||
@@ -187,20 +178,19 @@ jobs:
|
|||||||
lint-crlf:
|
lint-crlf:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Check line endings
|
- name: Check line endings
|
||||||
run: scripts-dev/check_line_terminators.sh
|
run: scripts-dev/check_line_terminators.sh
|
||||||
|
|
||||||
lint-newsfile:
|
lint-newsfile:
|
||||||
# Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
|
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
||||||
if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||||
@@ -208,20 +198,37 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||||
|
|
||||||
|
lint-pydantic:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: changes
|
||||||
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
|
- name: Install Rust
|
||||||
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
|
with:
|
||||||
|
poetry-version: "1.3.2"
|
||||||
|
extras: "all"
|
||||||
|
- run: poetry run scripts-dev/check_pydantic_models.py
|
||||||
|
|
||||||
lint-clippy:
|
lint-clippy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
needs: changes
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
with:
|
||||||
components: clippy
|
components: clippy
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
- uses: Swatinem/rust-cache@v2
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- run: cargo clippy -- -D warnings
|
- run: cargo clippy -- -D warnings
|
||||||
|
|
||||||
@@ -233,87 +240,35 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2025-04-23
|
toolchain: nightly-2022-12-01
|
||||||
components: clippy
|
components: clippy
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo clippy --all-features -- -D warnings
|
- run: cargo clippy --all-features -- -D warnings
|
||||||
|
|
||||||
lint-rust:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
|
||||||
with:
|
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- name: Setup Poetry
|
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
|
||||||
with:
|
|
||||||
# Install like a normal project from source with all optional dependencies
|
|
||||||
extras: all
|
|
||||||
install-project: "true"
|
|
||||||
poetry-version: "2.1.1"
|
|
||||||
|
|
||||||
- name: Ensure `Cargo.lock` is up to date (no stray changes after install)
|
|
||||||
# The `::error::` syntax is using GitHub Actions' error annotations, see
|
|
||||||
# https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
|
|
||||||
run: |
|
|
||||||
if git diff --quiet Cargo.lock; then
|
|
||||||
echo "Cargo.lock is up to date"
|
|
||||||
else
|
|
||||||
echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
|
|
||||||
git diff --exit-code Cargo.lock
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
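The check above relies on two things: `git diff --quiet` exits non-zero when `Cargo.lock` has uncommitted changes, and the `::error::` prefix turns the echoed message into a GitHub Actions error annotation. A hedged sketch of the equivalent check run locally:

    # Reinstall the project (rebuilds the Rust extension), then verify Cargo.lock is untouched.
    poetry install --extras all
    git diff --exit-code Cargo.lock   # exits 1 and prints the diff if Cargo.lock changed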
# This job is split from `lint-rust` because it requires a nightly Rust toolchain
|
|
||||||
# for some of the unstable options we use in `.rustfmt.toml`.
|
|
||||||
lint-rustfmt:
|
lint-rustfmt:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
needs: changes
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
# We use nightly so that we can use some unstable options that we use in
|
# We use nightly so that it correctly groups together imports
|
||||||
# `.rustfmt.toml`.
|
toolchain: nightly-2022-12-01
|
||||||
toolchain: nightly-2025-04-23
|
|
||||||
components: rustfmt
|
components: rustfmt
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo fmt --check
|
- run: cargo fmt --check
|
||||||
|
|
||||||
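Because rustfmt is pinned to a specific nightly for the unstable options in `.rustfmt.toml`, the same format check can be reproduced locally; a minimal sketch, assuming rustup is installed:

    # Install the pinned nightly toolchain with rustfmt, then run the same check as CI.
    rustup toolchain install nightly-2025-04-23 --component rustfmt
    cargo +nightly-2025-04-23 fmt --check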
# This is to detect issues with the rst file, which can otherwise cause issues
|
|
||||||
# when uploading packages to PyPi.
|
|
||||||
lint-readme:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: "pip install rstcheck"
|
|
||||||
- run: "rstcheck --report-level=WARNING README.rst"
|
|
||||||
|
|
||||||
# Dummy step to gate other tests on without repeating the whole list
|
# Dummy step to gate other tests on without repeating the whole list
|
||||||
linting-done:
|
linting-done:
|
||||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||||
@@ -322,17 +277,16 @@ jobs:
|
|||||||
- lint-mypy
|
- lint-mypy
|
||||||
- lint-crlf
|
- lint-crlf
|
||||||
- lint-newsfile
|
- lint-newsfile
|
||||||
|
- lint-pydantic
|
||||||
- check-sampleconfig
|
- check-sampleconfig
|
||||||
- check-schema-delta
|
- check-schema-delta
|
||||||
- check-lockfile
|
- check-lockfile
|
||||||
- lint-clippy
|
- lint-clippy
|
||||||
- lint-clippy-nightly
|
- lint-clippy-nightly
|
||||||
- lint-rust
|
|
||||||
- lint-rustfmt
|
- lint-rustfmt
|
||||||
- lint-readme
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
- uses: matrix-org/done-action@v2
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
@@ -343,11 +297,10 @@ jobs:
|
|||||||
lint
|
lint
|
||||||
lint-mypy
|
lint-mypy
|
||||||
lint-newsfile
|
lint-newsfile
|
||||||
|
lint-pydantic
|
||||||
lint-clippy
|
lint-clippy
|
||||||
lint-clippy-nightly
|
lint-clippy-nightly
|
||||||
lint-rust
|
|
||||||
lint-rustfmt
|
lint-rustfmt
|
||||||
lint-readme
|
|
||||||
|
|
||||||
|
|
||||||
calculate-test-jobs:
|
calculate-test-jobs:
|
||||||
@@ -355,8 +308,8 @@ jobs:
|
|||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- id: get-matrix
|
- id: get-matrix
|
||||||
@@ -376,7 +329,7 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -391,15 +344,13 @@ jobs:
|
|||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.job.postgres-version }}
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.job.python-version }}
|
python-version: ${{ matrix.job.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.job.extras }}
|
extras: ${{ matrix.job.extras }}
|
||||||
- name: Await PostgreSQL
|
- name: Await PostgreSQL
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -430,34 +381,34 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- linting-done
|
- linting-done
|
||||||
- changes
|
- changes
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
# There aren't wheels for some of the older deps, so we need to install
|
||||||
# their build dependencies
|
# their build dependencies
|
||||||
- run: |
|
- run: |
|
||||||
sudo apt-get -qq update
|
sudo apt-get -qq update
|
||||||
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||||
|
|
||||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.8'
|
||||||
|
|
||||||
- name: Prepare old deps
|
- name: Prepare old deps
|
||||||
# Note: we install using `uv` here, not poetry or pip to allow us to test with the
|
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
||||||
# minimum version of all dependencies, both those explicitly specified and those
|
run: .ci/scripts/prepare_old_deps.sh
|
||||||
# implicitly brought in by the explicit dependencies.
|
|
||||||
run: |
|
# Note: we install using `pip` here, not poetry. `poetry install` ignores the
|
||||||
pip install uv
|
# build-system section (https://github.com/python-poetry/poetry/issues/6154), but
|
||||||
uv pip install --system --resolution=lowest .[all,test]
|
# we explicitly want to test that you can `pip install` using the oldest version
|
||||||
|
# of poetry-core and setuptools-rust.
|
||||||
|
- run: pip install .[all,test]
|
||||||
|
|
||||||
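The `--resolution=lowest` flag is what makes this an "oldest deps" job: uv resolves every requirement, direct and transitive, to the minimum version its specifier allows rather than the newest. A hedged local equivalent of the step above:

    # Install Synapse with the oldest dependency versions the specifiers permit.
    pip install uv
    uv pip install --system --resolution=lowest '.[all,test]'
    # Inspect which versions were actually resolved.
    uv pip list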
# We nuke the local copy, as we've installed synapse into the virtualenv
|
# We nuke the local copy, as we've installed synapse into the virtualenv
|
||||||
# (rather than use an editable install, which we no longer support). If we
|
# (rather than use an editable install, which we no longer support). If we
|
||||||
@@ -491,17 +442,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["pypy-3.10"]
|
python-version: ["pypy-3.8"]
|
||||||
extras: ["all"]
|
extras: ["all"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.extras }}
|
extras: ${{ matrix.extras }}
|
||||||
- run: poetry run trial --jobs=2 tests
|
- run: poetry run trial --jobs=2 tests
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
@@ -545,15 +496,13 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Prepare test blacklist
|
- name: Prepare test blacklist
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
run: /bootstrap.sh synapse
|
run: /bootstrap.sh synapse
|
||||||
@@ -562,7 +511,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
||||||
@@ -592,11 +541,11 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
- run: .ci/scripts/test_export_data_command.sh
|
||||||
env:
|
env:
|
||||||
@@ -615,11 +564,11 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- python-version: "3.10"
|
- python-version: "3.8"
|
||||||
postgres-version: "14"
|
postgres-version: "11"
|
||||||
|
|
||||||
- python-version: "3.14"
|
- python-version: "3.11"
|
||||||
postgres-version: "17"
|
postgres-version: "15"
|
||||||
|
|
||||||
services:
|
services:
|
||||||
postgres:
|
postgres:
|
||||||
@@ -636,7 +585,7 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Add PostgreSQL apt repository
|
- name: Add PostgreSQL apt repository
|
||||||
# We need a version of pg_dump that can handle the version of
|
# We need a version of pg_dump that can handle the version of
|
||||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
# PostgreSQL being tested against. The Ubuntu package repository lags
|
||||||
@@ -647,10 +596,10 @@ jobs:
|
|||||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
- run: .ci/scripts/test_synapse_port_db.sh
|
||||||
id: run_tester_script
|
id: run_tester_script
|
||||||
@@ -660,7 +609,7 @@ jobs:
|
|||||||
PGPASSWORD: postgres
|
PGPASSWORD: postgres
|
||||||
PGDATABASE: postgres
|
PGDATABASE: postgres
|
||||||
- name: "Upload schema differences"
|
- name: "Upload schema differences"
|
||||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
||||||
with:
|
with:
|
||||||
name: Schema dumps
|
name: Schema dumps
|
||||||
@@ -690,21 +639,19 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout synapse codebase
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -727,13 +674,11 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- run: cargo test
|
- run: cargo test
|
||||||
|
|
||||||
@@ -747,13 +692,13 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo bench --no-run
|
- run: cargo bench --no-run
|
||||||
|
|
||||||
@@ -772,7 +717,7 @@ jobs:
|
|||||||
- linting-done
|
- linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
- uses: matrix-org/done-action@v2
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
|
|||||||
.github/workflows/triage-incoming.yml (vendored, 2 lines changed)
@@ -6,7 +6,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
triage:
|
triage:
|
||||||
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
|
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
|
||||||
with:
|
with:
|
||||||
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
|
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
|
||||||
content_id: ${{ github.event.issue.node_id }}
|
content_id: ${{ github.event.issue.node_id }}
|
||||||
|
|||||||
.github/workflows/triage_labelled.yml (vendored, 49 lines changed)
@@ -6,26 +6,39 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
move_needs_info:
|
move_needs_info:
|
||||||
|
name: Move X-Needs-Info on the triage board
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: >
|
if: >
|
||||||
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
|
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
env:
|
|
||||||
# This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
|
|
||||||
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
|
||||||
PROJECT_OWNER: matrix-org
|
|
||||||
# Backend issue triage board.
|
|
||||||
# https://github.com/orgs/matrix-org/projects/67/views/1
|
|
||||||
PROJECT_NUMBER: 67
|
|
||||||
ISSUE_URL: ${{ github.event.issue.html_url }}
|
|
||||||
# This field is case-sensitive.
|
|
||||||
TARGET_STATUS: Needs info
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/add-to-project@main
|
||||||
|
id: add_project
|
||||||
with:
|
with:
|
||||||
# Only clone the script file we care about, instead of the whole repo.
|
project-url: "https://github.com/orgs/matrix-org/projects/67"
|
||||||
sparse-checkout: .ci/scripts/triage_labelled_issue.sh
|
github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
||||||
|
- name: Set status
|
||||||
- name: Ensure issue exists on the board, then set Status
|
env:
|
||||||
run: .ci/scripts/triage_labelled_issue.sh
|
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
||||||
|
run: |
|
||||||
|
gh api graphql -f query='
|
||||||
|
mutation(
|
||||||
|
$project: ID!
|
||||||
|
$item: ID!
|
||||||
|
$fieldid: ID!
|
||||||
|
$columnid: String!
|
||||||
|
) {
|
||||||
|
updateProjectV2ItemFieldValue(
|
||||||
|
input: {
|
||||||
|
projectId: $project
|
||||||
|
itemId: $item
|
||||||
|
fieldId: $fieldid
|
||||||
|
value: {
|
||||||
|
singleSelectOptionId: $columnid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
) {
|
||||||
|
projectV2Item {
|
||||||
|
id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
|
||||||
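The mutation above sets a single-select field ("Status") on a Projects v2 item using hard-coded project, field and option IDs. For context, IDs of that kind can be looked up with a GraphQL query via the GitHub CLI; the sketch below is illustrative only and is not part of the workflow:

    # List the fields (and single-select options) of the matrix-org triage board.
    gh api graphql -f query='
      query($owner: String!, $number: Int!) {
        organization(login: $owner) {
          projectV2(number: $number) {
            id
            fields(first: 50) {
              nodes {
                ... on ProjectV2SingleSelectField { id name options { id name } }
              }
            }
          }
        }
      }' -f owner=matrix-org -F number=67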
|
|||||||
.github/workflows/twisted_trunk.yml (vendored, 49 lines changed)
@@ -20,9 +20,6 @@ concurrency:
|
|||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
env:
|
|
||||||
RUST_VERSION: 1.87.0
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_repo:
|
check_repo:
|
||||||
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
|
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
|
||||||
@@ -43,19 +40,16 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@stable
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
- run: |
|
- run: |
|
||||||
poetry remove twisted
|
poetry remove twisted
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
|
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
|
||||||
@@ -70,20 +64,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@stable
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
extras: "all test"
|
extras: "all test"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
- run: |
|
- run: |
|
||||||
poetry remove twisted
|
poetry remove twisted
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||||
@@ -108,22 +99,20 @@ jobs:
|
|||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
# We're using bookworm because that's what Debian oldstable is at the time of writing.
|
# We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
|
||||||
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
|
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
|
||||||
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
|
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
|
||||||
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
|
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
|
||||||
image: matrixdotorg/sytest-synapse:bookworm
|
image: matrixdotorg/sytest-synapse:focal
|
||||||
volumes:
|
volumes:
|
||||||
- ${{ github.workspace }}:/src
|
- ${{ github.workspace }}:/src
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@stable
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
|
||||||
|
|
||||||
- name: Patch dependencies
|
- name: Patch dependencies
|
||||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||||
@@ -147,7 +136,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
@@ -175,14 +164,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v4 for synapse
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -192,11 +181,11 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
set -x
|
set -x
|
||||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
||||||
pipx install poetry==2.1.1
|
pipx install poetry==1.3.2
|
||||||
|
|
||||||
poetry remove -n twisted
|
poetry remove -n twisted
|
||||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||||
poetry lock
|
poetry lock --no-update
|
||||||
working-directory: synapse
|
working-directory: synapse
|
||||||
|
|
||||||
- run: |
|
- run: |
|
||||||
@@ -217,7 +206,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
.gitignore (vendored, 1 line changed)
@@ -47,7 +47,6 @@ __pycache__/
|
|||||||
/.idea/
|
/.idea/
|
||||||
/.ropeproject/
|
/.ropeproject/
|
||||||
/.vscode/
|
/.vscode/
|
||||||
/.zed/
|
|
||||||
|
|
||||||
# build products
|
# build products
|
||||||
!/.coveragerc
|
!/.coveragerc
|
||||||
|
|||||||
@@ -1,6 +1 @@
|
|||||||
# Unstable options are only available on a nightly toolchain and must be opted into
|
|
||||||
unstable_features = true
|
|
||||||
|
|
||||||
# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
|
|
||||||
# https://github.com/rust-lang/rustfmt/issues/5083
|
|
||||||
group_imports = "StdExternalCrate"
|
group_imports = "StdExternalCrate"
|
||||||
|
|||||||
CHANGES.md (4061 lines changed; file diff suppressed because it is too large)
Cargo.lock (generated, 1522 lines changed; file diff suppressed because it is too large)
@@ -1,6 +0,0 @@
|
|||||||
Licensees holding a valid commercial license with Element may use this
|
|
||||||
software in accordance with the terms contained in a written agreement
|
|
||||||
between you and Element.
|
|
||||||
|
|
||||||
To purchase a commercial license please contact our sales team at
|
|
||||||
licensing@element.io
|
|
||||||
README.rst (266 lines changed)
@@ -1,69 +1,191 @@
|
|||||||
.. image:: ./docs/element_logo_white_bg.svg
|
.. image:: https://github.com/element-hq/product/assets/87339233/7abf477a-5277-47f3-be44-ea44917d8ed7
|
||||||
:height: 60px
|
:height: 60px
|
||||||
|
|
||||||
**Element Synapse - Matrix homeserver implementation**
|
===========================================================================================================
|
||||||
|
Element Synapse - Matrix homeserver implementation |support| |development| |documentation| |license| |pypi| |python|
|
||||||
|
===========================================================================================================
|
||||||
|
|
||||||
|support| |development| |documentation| |license| |pypi| |python|
|
Synapse is an open source `Matrix <https://matrix.org>`_ homeserver
|
||||||
|
|
||||||
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
|
|
||||||
implementation, written and maintained by `Element <https://element.io>`_.
|
implementation, written and maintained by `Element <https://element.io>`_.
|
||||||
`Matrix <https://github.com/matrix-org>`__ is the open standard for secure and
|
`Matrix <https://github.com/matrix-org>`_ is the open standard for
|
||||||
interoperable real-time communications. You can directly run and manage the
|
secure and interoperable real time communications. You can directly run
|
||||||
source code in this repository, available under an AGPL license (or
|
and manage the source code in this repository, available under an AGPL
|
||||||
alternatively under a commercial license from Element).
|
license. There is no support provided from Element unless you have a
|
||||||
|
subscription.
|
||||||
|
|
||||||
There is no support provided by Element unless you have a subscription from
|
Subscription alternative
|
||||||
Element.
|
------------------------
|
||||||
|
|
||||||
🚀 Getting started
|
Alternatively, for those that need an enterprise-ready solution, Element
|
||||||
==================
|
Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
|
||||||
|
ESS builds on Synapse to offer a complete Matrix-based backend including the full
|
||||||
|
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
|
||||||
|
giving admins the power to easily manage an organization-wide
|
||||||
|
deployment. It includes advanced identity management, auditing,
|
||||||
|
moderation and data retention options as well as Long Term Support and
|
||||||
|
SLAs. ESS can be used to support any Matrix-based frontend client.
|
||||||
|
|
||||||
This component is developed and maintained by `Element <https://element.io>`_.
|
.. contents::
|
||||||
It gets shipped as part of the **Element Server Suite (ESS)** which provides the
|
|
||||||
official means of deployment.
|
|
||||||
|
|
||||||
ESS is a Matrix distribution from Element with focus on quality and ease of use.
|
🛠️ Installing and configuration
|
||||||
It ships a full Matrix stack tailored to the respective use case.
|
===============================
|
||||||
|
|
||||||
There are three editions of ESS:
|
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
||||||
|
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
||||||
|
<https://element-hq.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
||||||
|
|
||||||
- `ESS Community <https://github.com/element-hq/ess-helm>`_ - the free Matrix
|
.. _federation:
|
||||||
distribution from Element tailored to small-/mid-scale, non-commercial
|
|
||||||
community use cases
|
Synapse has a variety of `config options
|
||||||
- `ESS Pro <https://element.io/server-suite>`_ - the commercial Matrix
|
<https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
||||||
distribution from Element for professional use
|
which can be used to customise its behaviour after installation.
|
||||||
- `ESS TI-M <https://element.io/server-suite/ti-messenger>`_ - a special version
|
There are additional details on how to `configure Synapse for federation here
|
||||||
of ESS Pro focused on the requirements of TI-Messenger Pro and ePA as
|
<https://element-hq.github.io/synapse/latest/federate.html>`_.
|
||||||
specified by the German National Digital Health Agency Gematik
|
|
||||||
|
.. _reverse-proxy:
|
||||||
|
|
||||||
|
Using a reverse proxy with Synapse
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
It is recommended to put a reverse proxy such as
|
||||||
|
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||||
|
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||||
|
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
||||||
|
`HAProxy <https://www.haproxy.org/>`_ or
|
||||||
|
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
||||||
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
|
For information on configuring one, see `the reverse proxy docs
|
||||||
|
<https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_.
|
||||||
|
|
||||||
|
Upgrading an existing Synapse
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
The instructions for upgrading Synapse are in `the upgrade notes`_.
|
||||||
|
Please check these instructions as upgrading may require extra steps for some
|
||||||
|
versions of Synapse.
|
||||||
|
|
||||||
|
.. _the upgrade notes: https://element-hq.github.io/synapse/develop/upgrade.html
|
||||||
|
|
||||||
|
|
||||||
🛠️ Standalone installation and configuration
|
Platform dependencies
|
||||||
============================================
|
---------------------
|
||||||
|
|
||||||
The Synapse documentation describes `options for installing Synapse standalone
|
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||||
<https://element-hq.github.io/synapse/latest/setup/installation.html>`_. See
|
and aims to follow supported upstream versions. See the
|
||||||
below for more useful documentation links.
|
`deprecation policy <https://element-hq.github.io/synapse/latest/deprecation_policy.html>`_
|
||||||
|
for more details.
|
||||||
|
|
||||||
- `Synapse configuration options <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
|
||||||
- `Synapse configuration for federation <https://element-hq.github.io/synapse/latest/federate.html>`_
|
|
||||||
- `Using a reverse proxy with Synapse <https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_
|
|
||||||
- `Upgrading Synapse <https://element-hq.github.io/synapse/develop/upgrade.html>`_
|
|
||||||
|
|
||||||
|
Security note
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
||||||
|
repository endpoints`_.
|
||||||
|
|
||||||
|
.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
|
||||||
|
|
||||||
|
Whilst we make a reasonable effort to mitigate against XSS attacks (for
|
||||||
|
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
|
||||||
|
domain hosting other web applications. This especially applies to sharing
|
||||||
|
the domain with Matrix web clients and other sensitive applications like
|
||||||
|
webmail. See
|
||||||
|
https://developer.github.com/changes/2014-04-25-user-content-security for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
.. _CSP: https://github.com/matrix-org/synapse/pull/1021
|
||||||
|
|
||||||
|
Ideally, the homeserver should not simply be on a different subdomain, but on
|
||||||
|
a completely different `registered domain`_ (also known as top-level site or
|
||||||
|
eTLD+1). This is because `some attacks`_ are still possible as long as the two
|
||||||
|
applications share the same registered domain.
|
||||||
|
|
||||||
|
.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
|
||||||
|
|
||||||
|
.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
|
||||||
|
|
||||||
|
To illustrate this with an example, if your Element Web or other sensitive web
|
||||||
|
application is hosted on ``A.example1.com``, you should ideally host Synapse on
|
||||||
|
``example2.com``. Some amount of protection is offered by hosting on
|
||||||
|
``B.example1.com`` instead, so this is also acceptable in some scenarios.
|
||||||
|
However, you should *not* host your Synapse on ``A.example1.com``.
|
||||||
|
|
||||||
|
Note that all of the above refers exclusively to the domain used in Synapse's
|
||||||
|
``public_baseurl`` setting. In particular, it has no bearing on the domain
|
||||||
|
mentioned in MXIDs hosted on that server.
|
||||||
|
|
||||||
|
Following this advice ensures that even if an XSS is found in Synapse, the
|
||||||
|
impact to other applications will be minimal.
|
||||||
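To make the separation concrete, here is a minimal sketch of the two relevant
``homeserver.yaml`` settings, using the illustrative domains from the example
above (MXIDs on ``example1.com``, Synapse itself served from ``example2.com``)::

    # The domain that appears in MXIDs; the advice above does not apply to it.
    server_name: "example1.com"

    # The public-facing base URL of this Synapse; ideally a completely different
    # registered domain from any web clients or other sensitive web applications.
    public_baseurl: "https://example2.com/"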
|
|
||||||
|
|
||||||
|
🧪 Testing a new installation
|
||||||
|
============================
|
||||||
|
|
||||||
|
The easiest way to try out your new Synapse installation is by connecting to it
|
||||||
|
from a web client.
|
||||||
|
|
||||||
|
Unless you are running a test instance of Synapse on your local machine, in
|
||||||
|
general, you will need to enable TLS support before you can successfully
|
||||||
|
connect from a client: see
|
||||||
|
`TLS certificates <https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
||||||
|
|
||||||
|
An easy way to get started is to login or register via Element at
|
||||||
|
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||||
|
You will need to change the server you are logging into from ``matrix.org``
|
||||||
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
|
If you prefer to use another client, refer to our
|
||||||
|
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
|
||||||
|
|
||||||
|
If all goes well you should at least be able to log in, create a room, and
|
||||||
|
start sending messages.
|
||||||
|
|
||||||
|
.. _`client-user-reg`:
|
||||||
|
|
||||||
|
Registering a new user from a client
|
||||||
|
------------------------------------
|
||||||
|
|
||||||
|
By default, registration of new users via Matrix clients is disabled. To enable
|
||||||
|
it:
|
||||||
|
|
||||||
|
1. In the
|
||||||
|
`registration config section <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
||||||
|
set ``enable_registration: true`` in ``homeserver.yaml``.
|
||||||
|
2. Then **either**:
|
||||||
|
|
||||||
|
a. set up a `CAPTCHA <https://element-hq.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
||||||
|
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
||||||
|
|
||||||
|
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
||||||
|
the public internet. Without it, anyone can freely register accounts on your homeserver.
|
||||||
|
This can be exploited by attackers to create spambots targeting the rest of the Matrix
|
||||||
|
federation.
|
||||||
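For reference, a minimal ``homeserver.yaml`` sketch for option (b) above (open
registration without verification, which is only sensible for private or test
deployments) could look like::

    enable_registration: true
    # Only acceptable if you understand and accept the spam risk described above:
    enable_registration_without_verification: true

If you use a CAPTCHA instead (option (a)), keep ``enable_registration: true`` and
follow the CAPTCHA setup guide linked above rather than enabling the second option.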
|
|
||||||
|
Your new user name will be formed partly from the ``server_name``, and partly
|
||||||
|
from a localpart you specify when you create the account. Your name will take
|
||||||
|
the form of::
|
||||||
|
|
||||||
|
@localpart:my.domain.name
|
||||||
|
|
||||||
|
(pronounced "at localpart on my dot domain dot name").
|
||||||
|
|
||||||
|
As when logging in, you will need to specify a "Custom server". Specify your
|
||||||
|
desired ``localpart`` in the 'User name' box.
|
||||||
|
|
||||||
🎯 Troubleshooting and support
|
🎯 Troubleshooting and support
|
||||||
==============================
|
=============================
|
||||||
|
|
||||||
🚀 Professional support
|
🚀 Professional support
|
||||||
-----------------------
|
----------------------
|
||||||
|
|
||||||
Enterprise quality support for Synapse including SLAs is available as part of an
|
Enterprise quality support for Synapse including SLAs is available as part of an
|
||||||
`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.
|
`Element Server Suite (ESS) <https://element.io/pricing>` subscription.
|
||||||
|
|
||||||
If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
|
If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`
|
||||||
and access the `Element product documentation <https://docs.element.io>`_.
|
and access the `knowledge base <https://ems-docs.element.io>`.
|
||||||
|
|
||||||
🤝 Community support
|
🤝 Community support
|
||||||
--------------------
|
-------------------
|
||||||
|
|
||||||
The `Admin FAQ <https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
The `Admin FAQ <https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
||||||
includes tips on dealing with some common problems. For more details, see
|
includes tips on dealing with some common problems. For more details, see
|
||||||
@@ -79,6 +201,35 @@ issues for support requests, only for bug reports and feature requests.
|
|||||||
.. |docs| replace:: ``docs``
|
.. |docs| replace:: ``docs``
|
||||||
.. _docs: docs
|
.. _docs: docs
|
||||||
|
|
||||||
|
🪪 Identity Servers
|
||||||
|
==================
|
||||||
|
|
||||||
|
Identity servers have the job of mapping email addresses and other 3rd Party
|
||||||
|
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
|
||||||
|
before creating that mapping.
|
||||||
|
|
||||||
|
**They are not where accounts or credentials are stored - these live on home
|
||||||
|
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
|
||||||
|
|
||||||
|
This process is very security-sensitive, as there is obvious risk of spam if it
|
||||||
|
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
|
||||||
|
term, we hope to create a decentralised system to manage it (`matrix-doc #712
|
||||||
|
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
|
||||||
|
the role of managing trusted identity in the Matrix ecosystem is farmed out to
|
||||||
|
a cluster of known trusted ecosystem partners, who run 'Matrix Identity
|
||||||
|
Servers' such as `Sydent <https://github.com/matrix-org/sydent>`_, whose role
|
||||||
|
is purely to authenticate and track 3PID logins and publish end-user public
|
||||||
|
keys.
|
||||||
|
|
||||||
|
You can host your own copy of Sydent, but this will prevent you reaching other
|
||||||
|
users in the Matrix ecosystem via their email address, and prevent them finding
|
||||||
|
you. We therefore recommend that you use one of the centralised identity servers
|
||||||
|
at ``https://matrix.org`` or ``https://vector.im`` for now.
|
||||||
|
|
||||||
|
To reiterate: the Identity server will only be used if you choose to associate
|
||||||
|
an email address with your account, or send an invite to another user via their
|
||||||
|
email address.
|
||||||
|
|
||||||
|
|
||||||
🛠️ Development
|
🛠️ Development
|
||||||
==============
|
==============
|
||||||
@@ -86,9 +237,9 @@ issues for support requests, only for bug reports and feature requests.
|
|||||||
We welcome contributions to Synapse from the community!
|
We welcome contributions to Synapse from the community!
|
||||||
The best place to get started is our
|
The best place to get started is our
|
||||||
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
|
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||||
This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
|
This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
|
||||||
information for Synapse developers as well as Synapse administrators.
|
|
||||||
|
|
||||||
|
information for Synapse developers as well as Synapse administrators.
|
||||||
Developers might be particularly interested in:
|
Developers might be particularly interested in:
|
||||||
|
|
||||||
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
|
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
|
||||||
@@ -98,33 +249,6 @@ Developers might be particularly interested in:
|
|||||||
Alongside all that, join our developer community on Matrix:
|
Alongside all that, join our developer community on Matrix:
|
||||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||||
|
|
||||||
Copyright and Licensing
|
|
||||||
=======================
|
|
||||||
|
|
||||||
| Copyright 2014–2017 OpenMarket Ltd
|
|
||||||
| Copyright 2017 Vector Creations Ltd
|
|
||||||
| Copyright 2017–2025 New Vector Ltd
|
|
||||||
| Copyright 2025 Element Creations Ltd
|
|
||||||
|
|
||||||
This software is dual-licensed by Element Creations Ltd (Element). It can be
|
|
||||||
used either:
|
|
||||||
|
|
||||||
(1) for free under the terms of the GNU Affero General Public License (as
|
|
||||||
published by the Free Software Foundation, either version 3 of the License,
|
|
||||||
or (at your option) any later version); OR
|
|
||||||
|
|
||||||
(2) under the terms of a paid-for Element Commercial License agreement between
|
|
||||||
you and Element (the terms of which may vary depending on what you and
|
|
||||||
Element have agreed to).
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software distributed
|
|
||||||
under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
|
||||||
CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the
|
|
||||||
specific language governing permissions and limitations under the Licenses.
|
|
||||||
|
|
||||||
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase
|
|
||||||
an Element commercial license for this software.
|
|
||||||
|
|
||||||
|
|
||||||
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
|
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
|
||||||
:alt: (get community support in #synapse:matrix.org)
|
:alt: (get community support in #synapse:matrix.org)
|
||||||
|
|||||||
@@ -1,14 +1,12 @@
|
|||||||
# A build script for poetry that adds the rust extension.
|
# A build script for poetry that adds the rust extension.
|
||||||
|
|
||||||
import itertools
|
|
||||||
import os
|
import os
|
||||||
from typing import Any
|
from typing import Any, Dict
|
||||||
|
|
||||||
from packaging.specifiers import SpecifierSet
|
|
||||||
from setuptools_rust import Binding, RustExtension
|
from setuptools_rust import Binding, RustExtension
|
||||||
|
|
||||||
|
|
||||||
def build(setup_kwargs: dict[str, Any]) -> None:
|
def build(setup_kwargs: Dict[str, Any]) -> None:
|
||||||
original_project_dir = os.path.dirname(os.path.realpath(__file__))
|
original_project_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
|
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
|
||||||
|
|
||||||
@@ -16,27 +14,10 @@ def build(setup_kwargs: dict[str, Any]) -> None:
|
|||||||
target="synapse.synapse_rust",
|
target="synapse.synapse_rust",
|
||||||
path=cargo_toml_path,
|
path=cargo_toml_path,
|
||||||
binding=Binding.PyO3,
|
binding=Binding.PyO3,
|
||||||
# This flag is a no-op in the latest versions. Instead, we need to
|
|
||||||
# specify this in the `bdist_wheel` config below.
|
|
||||||
py_limited_api=True,
|
py_limited_api=True,
|
||||||
# We always build in release mode, as we can't distinguish
|
# We force always building in release mode, as we can't tell the
|
||||||
# between using `poetry` in development vs production.
|
# difference between using `poetry` in development vs production.
|
||||||
debug=False,
|
debug=False,
|
||||||
)
|
)
|
||||||
setup_kwargs.setdefault("rust_extensions", []).append(extension)
|
setup_kwargs.setdefault("rust_extensions", []).append(extension)
|
||||||
setup_kwargs["zip_safe"] = False
|
setup_kwargs["zip_safe"] = False
|
||||||
|
|
||||||
# We look up the minimum supported Python version with
|
|
||||||
# `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
|
|
||||||
# version that matches. We then convert that into the `py_limited_api` form,
|
|
||||||
# e.g. cp310 for Python 3.10.
|
|
||||||
py_limited_api: str
|
|
||||||
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
|
|
||||||
for minor_version in itertools.count(start=10):
|
|
||||||
if f"3.{minor_version}.0" in python_bounds:
|
|
||||||
py_limited_api = f"cp3{minor_version}"
|
|
||||||
break
|
|
||||||
|
|
||||||
setup_kwargs.setdefault("options", {}).setdefault("bdist_wheel", {})[
|
|
||||||
"py_limited_api"
|
|
||||||
] = py_limited_api
|
|
||||||
|
|||||||
1
changelog.d/17187.feature
Normal file
1
changelog.d/17187.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add initial implementation of an experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
|
||||||
1
changelog.d/17198.misc
Normal file
1
changelog.d/17198.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Remove unused `expire_access_token` option in the Synapse Docker config file. Contributed by @AaronDewes.
|
||||||
1
changelog.d/17254.bugfix
Normal file
1
changelog.d/17254.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Fix searching for users with their exact localpart whose ID includes a hyphen.
|
||||||
1
changelog.d/17256.feature
Normal file
1
changelog.d/17256.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Improve ratelimiting in Synapse (#17256).
|
||||||
1
changelog.d/17265.misc
Normal file
1
changelog.d/17265.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Use fully-qualified `PersistedEventPosition` when returning `RoomsForUser` to facilitate proper comparisons and `RoomStreamToken` generation.
|
||||||
1
changelog.d/17266.misc
Normal file
1
changelog.d/17266.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add debug logging for when room keys are uploaded, including whether they are replacing other room keys.
|
||||||
1
changelog.d/17270.feature
Normal file
1
changelog.d/17270.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add support for the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) report room API.
|
||||||
1
changelog.d/17271.misc
Normal file
1
changelog.d/17271.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Handle OTK uploads off master.
|
||||||
1
changelog.d/17272.bugfix
Normal file
1
changelog.d/17272.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Fix wrong retention policy being used when filtering events.
|
||||||
1
changelog.d/17273.misc
Normal file
1
changelog.d/17273.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Don't try and resync devices for remote users whose servers are marked as down.
|
||||||
1
changelog.d/17275.bugfix
Normal file
1
changelog.d/17275.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Fix bug where OTKs were not always included in `/sync` response when using workers.
|
||||||
1
changelog.d/17276.feature
Normal file
1
changelog.d/17276.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Filter for public and empty rooms added to Admin-API [List Room API](https://element-hq.github.io/synapse/latest/admin_api/rooms.html#list-room-api).
|
||||||
1
changelog.d/17277.feature
Normal file
1
changelog.d/17277.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add `is_dm` filtering to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
|
||||||
1
changelog.d/17279.misc
Normal file
1
changelog.d/17279.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Re-organize Pydantic models and types used in handlers.
|
||||||
1
changelog.d/17281.feature
Normal file
1
changelog.d/17281.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add `is_encrypted` filtering to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
|
||||||
1
changelog.d/17282.feature
Normal file
1
changelog.d/17282.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Include user membership in events served to clients, per MSC4115.
|
||||||
1
changelog.d/17283.bugfix
Normal file
1
changelog.d/17283.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Fix a long-standing bug where an invalid 'from' parameter to [`/notifications`](https://spec.matrix.org/v1.10/client-server-api/#get_matrixclientv3notifications) would result in an Internal Server Error.
|
||||||
1
changelog.d/17284.feature
Normal file
1
changelog.d/17284.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Do not require user-interactive authentication for uploading cross-signing keys for the first time, per MSC3967.
|
||||||
1
changelog.d/17293.feature
Normal file
1
changelog.d/17293.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add `stream_ordering` sort to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
|
||||||
2
changelog.d/17294.feature
Normal file
2
changelog.d/17294.feature
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
`register_new_matrix_user` now supports a --password-file flag, which
|
||||||
|
is useful for scripting.
|
||||||
1
changelog.d/17295.bugfix
Normal file
1
changelog.d/17295.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Fix edge case in `/sync` returning the wrong the state when using sharded event persisters.
|
||||||
1
changelog.d/17296.feature
Normal file
1
changelog.d/17296.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add support for the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) report room API.
|
||||||
1
changelog.d/17297.misc
Normal file
1
changelog.d/17297.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Bump `mypy` from 1.8.0 to 1.9.0.
|
||||||
1
changelog.d/17300.misc
Normal file
1
changelog.d/17300.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Expose the worker instance that persisted the event on `event.internal_metadata.instance_name`.
|
||||||
1
changelog.d/17301.bugfix
Normal file
1
changelog.d/17301.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add initial implementation of an experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
|
||||||
2
changelog.d/17304.feature
Normal file
2
changelog.d/17304.feature
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
`register_new_matrix_user` now supports a --exists-ok flag to allow registration of users that already exist in the database.
|
||||||
|
This is useful for scripts that bootstrap user accounts with initial passwords.
|
||||||
1
changelog.d/17308.doc
Normal file
1
changelog.d/17308.doc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add missing quotes for example for `exclude_rooms_from_sync`.
|
||||||
1
changelog.d/17322.feature
Normal file
1
changelog.d/17322.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add support for via query parameter from MSC415.
|
||||||
1
changelog.d/17324.misc
Normal file
1
changelog.d/17324.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Update the README with Element branding, improve headers and fix the #synapse:matrix.org support room link rendering.
|
||||||
1
changelog.d/17325.misc
Normal file
1
changelog.d/17325.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
This is a changelog so tests will run.
|
||||||
1
changelog.d/17331.misc
Normal file
1
changelog.d/17331.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Change path of the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync implementation to `/org.matrix.simplified_msc3575/sync` since our simplified API is slightly incompatible with what's in the current MSC.
|
||||||
1
changelog.d/17339.misc
Normal file
1
changelog.d/17339.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Tidy up `parse_integer` docs and call sites to reflect the fact that they require non-negative integers by default, and bring `parse_integer_from_args` default in alignment. Contributed by Denis Kasak (@dkasak).
|
||||||
@@ -1 +0,0 @@
|
|||||||
Group together dependabot update PRs to reduce the review load.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix `HomeServer.shutdown()` failing if the homeserver hasn't been setup yet.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix sliding sync performance slow down for long lived connections.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Respond with useful error codes when `Content-Length` header/s are invalid.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix a bug where Mastodon posts (and possibly other embeds) have the wrong description for URL previews.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Switch the build backend from `poetry-core` to `maturin`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Drop the broken netlify documentation workflow until a new one is implemented.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix bug where `Duration` was logged incorrectly.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add an admin API for retrieving a paginated list of quarantined media.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Document the importance of `public_baseurl` when configuring OpenID Connect authentication.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Server admins can bypass the quarantine media check when downloading media by setting the `admin_unsafely_bypass_quarantine` query parameter to `true` on Client-Server API media download requests.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Don't include debug logs in `Clock` unless explicitly enabled.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Implemented pagination for the [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) mutual rooms endpoint. Contributed by @tulir @ Beeper.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Admin API: add worker support to `GET /_synapse/admin/v2/users/<user_id>`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Use `uv` to test olddeps to ensure all transitive dependencies use minimum versions.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Improve proxy support for the `federation_client.py` dev script. Contributed by Denis Kasak (@dkasak).
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that MacOS support has been dropped.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add experimental support for the [MSC4370](https://github.com/matrix-org/matrix-spec-proposals/pull/4370) Federation API `GET /extremities` endpoint.
|
|
||||||
@@ -21,8 +21,7 @@
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
"""Starts a synapse client console."""
|
""" Starts a synapse client console. """
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import binascii
|
import binascii
|
||||||
import cmd
|
import cmd
|
||||||
@@ -33,6 +32,7 @@ import sys
|
|||||||
import time
|
import time
|
||||||
import urllib
|
import urllib
|
||||||
from http import TwistedHttpClient
|
from http import TwistedHttpClient
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import urlparse
|
import urlparse
|
||||||
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
||||||
@@ -244,7 +244,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
if "flows" not in json_res:
|
if "flows" not in json_res:
|
||||||
print("Failed to find any login flows.")
|
print("Failed to find any login flows.")
|
||||||
return False
|
defer.returnValue(False)
|
||||||
|
|
||||||
flow = json_res["flows"][0] # assume first is the one we want.
|
flow = json_res["flows"][0] # assume first is the one we want.
|
||||||
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
||||||
@@ -253,8 +253,8 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"Unable to login via the command line client. Please visit "
|
"Unable to login via the command line client. Please visit "
|
||||||
"%s to login." % fallback_url
|
"%s to login." % fallback_url
|
||||||
)
|
)
|
||||||
return False
|
defer.returnValue(False)
|
||||||
return True
|
defer.returnValue(True)
|
||||||
|
|
||||||
def do_emailrequest(self, line):
|
def do_emailrequest(self, line):
|
||||||
"""Requests the association of a third party identifier
|
"""Requests the association of a third party identifier
|
||||||
@@ -725,7 +725,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
method,
|
method,
|
||||||
path,
|
path,
|
||||||
data=None,
|
data=None,
|
||||||
query_params: dict | None = None,
|
query_params: Optional[dict] = None,
|
||||||
alt_text=None,
|
alt_text=None,
|
||||||
):
|
):
|
||||||
"""Runs an HTTP request and pretty prints the output.
|
"""Runs an HTTP request and pretty prints the output.
|
||||||
|
|||||||
@@ -22,6 +22,7 @@
|
|||||||
import json
|
import json
|
||||||
import urllib
|
import urllib
|
||||||
from pprint import pformat
|
from pprint import pformat
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from twisted.internet import defer, reactor
|
from twisted.internet import defer, reactor
|
||||||
from twisted.web.client import Agent, readBody
|
from twisted.web.client import Agent, readBody
|
||||||
@@ -77,7 +78,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
url, data, headers_dict={"Content-Type": ["application/json"]}
|
url, data, headers_dict={"Content-Type": ["application/json"]}
|
||||||
)
|
)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return response.code, body
|
defer.returnValue((response.code, body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
@@ -87,9 +88,9 @@ class TwistedHttpClient(HttpClient):
|
|||||||
url = "%s?%s" % (url, qs)
|
url = "%s?%s" % (url, qs)
|
||||||
response = yield self._create_get_request(url)
|
response = yield self._create_get_request(url)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return json.loads(body)
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
|
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
||||||
"""Wrapper of _create_request to issue a PUT request"""
|
"""Wrapper of _create_request to issue a PUT request"""
|
||||||
headers_dict = headers_dict or {}
|
headers_dict = headers_dict or {}
|
||||||
|
|
||||||
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||||
)
|
)
|
||||||
|
|
||||||
def _create_get_request(self, url, headers_dict: dict | None = None):
|
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
|
||||||
"""Wrapper of _create_request to issue a GET request"""
|
"""Wrapper of _create_request to issue a GET request"""
|
||||||
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
||||||
|
|
||||||
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
data=None,
|
data=None,
|
||||||
qparams=None,
|
qparams=None,
|
||||||
jsonreq=True,
|
jsonreq=True,
|
||||||
headers: dict | None = None,
|
headers: Optional[dict] = None,
|
||||||
):
|
):
|
||||||
headers = headers or {}
|
headers = headers or {}
|
||||||
|
|
||||||
@@ -133,11 +134,11 @@ class TwistedHttpClient(HttpClient):
|
|||||||
response = yield self._create_request(method, url)
|
response = yield self._create_request(method, url)
|
||||||
|
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return json.loads(body)
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_request(
|
def _create_request(
|
||||||
self, method, url, producer=None, headers_dict: dict | None = None
|
self, method, url, producer=None, headers_dict: Optional[dict] = None
|
||||||
):
|
):
|
||||||
"""Creates and sends a request to the given url"""
|
"""Creates and sends a request to the given url"""
|
||||||
headers_dict = headers_dict or {}
|
headers_dict = headers_dict or {}
|
||||||
@@ -172,7 +173,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
if self.verbose:
|
if self.verbose:
|
||||||
print("Status %s %s" % (response.code, response.phrase))
|
print("Status %s %s" % (response.code, response.phrase))
|
||||||
print(pformat(list(response.headers.getAllRawHeaders())))
|
print(pformat(list(response.headers.getAllRawHeaders())))
|
||||||
return response
|
defer.returnValue(response)
|
||||||
|
|
||||||
def sleep(self, seconds):
|
def sleep(self, seconds):
|
||||||
d = defer.Deferred()
|
d = defer.Deferred()
|
||||||
|
|||||||
@@ -30,6 +30,3 @@ docker-compose up -d
|
|||||||
### More information
|
### More information
|
||||||
|
|
||||||
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
|
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
|
||||||
|
|
||||||
**For a more comprehensive Docker Compose example showcasing a full Matrix 2.0 stack, please see
|
|
||||||
https://github.com/element-hq/element-docker-demo**
|
|
||||||
@@ -51,7 +51,7 @@ services:
|
|||||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: docker.io/postgres:15-alpine
|
image: docker.io/postgres:12-alpine
|
||||||
# Change that password, of course!
|
# Change that password, of course!
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_USER=synapse
|
- POSTGRES_USER=synapse
|
||||||
|
|||||||
@@ -8,9 +8,6 @@ All examples and snippets assume that your Synapse service is called `synapse` i
|
|||||||
|
|
||||||
An example Docker Compose file can be found [here](docker-compose.yaml).
|
An example Docker Compose file can be found [here](docker-compose.yaml).
|
||||||
|
|
||||||
**For a more comprehensive Docker Compose example, showcasing a full Matrix 2.0 stack (originally based on this
|
|
||||||
docker-compose.yaml), please see https://github.com/element-hq/element-docker-demo**
|
|
||||||
|
|
||||||
## Worker Service Examples in Docker Compose
|
## Worker Service Examples in Docker Compose
|
||||||
|
|
||||||
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` (or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
|
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` (or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
|
||||||
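As a rough sketch (not the full example referenced above; the image tag and mount
paths here are assumptions, so adapt them to your deployment), such a worker
service might be declared like this:

```yaml
  synapse-generic-worker-1:
    # Assumed image; use the same Synapse image as your main `synapse` service.
    image: matrixdotorg/synapse:latest
    entrypoint:
      - "/start.py"
      - "run"
      - "--config-path=/data/homeserver.yaml"                        # assumed mount path
      - "--config-path=/data/workers/synapse-generic-worker-1.yaml"  # assumed mount path
    environment:
      SYNAPSE_WORKER: synapse.app.generic_worker
    volumes:
      - ./synapse-data:/data   # assumed bind mount shared with the main service
```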
|
|||||||
@@ -220,24 +220,29 @@
|
|||||||
"yBucketBound": "auto"
|
"yBucketBound": "auto"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"datasource": {
|
|
||||||
"uid": "${DS_PROMETHEUS}",
|
|
||||||
"type": "prometheus"
|
|
||||||
},
|
|
||||||
"aliasColors": {},
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
"dashLength": 10,
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": {
|
||||||
|
"uid": "${DS_PROMETHEUS}"
|
||||||
|
},
|
||||||
|
"description": "",
|
||||||
"fieldConfig": {
|
"fieldConfig": {
|
||||||
"defaults": {
|
"defaults": {
|
||||||
"links": []
|
"links": []
|
||||||
},
|
},
|
||||||
"overrides": []
|
"overrides": []
|
||||||
},
|
},
|
||||||
|
"fill": 0,
|
||||||
|
"fillGradient": 0,
|
||||||
"gridPos": {
|
"gridPos": {
|
||||||
"h": 9,
|
"h": 9,
|
||||||
"w": 12,
|
"w": 12,
|
||||||
"x": 12,
|
"x": 12,
|
||||||
"y": 1
|
"y": 1
|
||||||
},
|
},
|
||||||
|
"hiddenSeries": false,
|
||||||
"id": 152,
|
"id": 152,
|
||||||
"legend": {
|
"legend": {
|
||||||
"avg": false,
|
"avg": false,
|
||||||
@@ -250,81 +255,71 @@
|
|||||||
"values": false
|
"values": false
|
||||||
},
|
},
|
||||||
"lines": true,
|
"lines": true,
|
||||||
|
"linewidth": 0,
|
||||||
|
"links": [],
|
||||||
"nullPointMode": "connected",
|
"nullPointMode": "connected",
|
||||||
"options": {
|
"options": {
|
||||||
"alertThreshold": true
|
"alertThreshold": true
|
||||||
},
|
},
|
||||||
"paceLength": 10,
|
"paceLength": 10,
|
||||||
"pluginVersion": "10.4.3",
|
"percentage": false,
|
||||||
|
"pluginVersion": "9.2.2",
|
||||||
"pointradius": 5,
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
"renderer": "flot",
|
"renderer": "flot",
|
||||||
"seriesOverrides": [
|
"seriesOverrides": [
|
||||||
{
|
{
|
||||||
"alias": "Avg",
|
"alias": "Avg",
|
||||||
"fill": 0,
|
"fill": 0,
|
||||||
"linewidth": 3,
|
"linewidth": 3
|
||||||
"$$hashKey": "object:48"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "99%",
|
"alias": "99%",
|
||||||
"color": "#C4162A",
|
"color": "#C4162A",
|
||||||
"fillBelowTo": "90%",
|
"fillBelowTo": "90%"
|
||||||
"$$hashKey": "object:49"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "90%",
|
"alias": "90%",
|
||||||
"color": "#FF7383",
|
"color": "#FF7383",
|
||||||
"fillBelowTo": "75%",
|
"fillBelowTo": "75%"
|
||||||
"$$hashKey": "object:50"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "75%",
|
"alias": "75%",
|
||||||
"color": "#FFEE52",
|
"color": "#FFEE52",
|
||||||
"fillBelowTo": "50%",
|
"fillBelowTo": "50%"
|
||||||
"$$hashKey": "object:51"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "50%",
|
"alias": "50%",
|
||||||
"color": "#73BF69",
|
"color": "#73BF69",
|
||||||
"fillBelowTo": "25%",
|
"fillBelowTo": "25%"
|
||||||
"$$hashKey": "object:52"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "25%",
|
"alias": "25%",
|
||||||
"color": "#1F60C4",
|
"color": "#1F60C4",
|
||||||
"fillBelowTo": "5%",
|
"fillBelowTo": "5%"
|
||||||
"$$hashKey": "object:53"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "5%",
|
"alias": "5%",
|
||||||
"lines": false,
|
"lines": false
|
||||||
"$$hashKey": "object:54"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "Average",
|
"alias": "Average",
|
||||||
"color": "rgb(255, 255, 255)",
|
"color": "rgb(255, 255, 255)",
|
||||||
"lines": true,
|
"lines": true,
|
||||||
"linewidth": 3,
|
"linewidth": 3
|
||||||
"$$hashKey": "object:55"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"alias": "Local events being persisted",
|
"alias": "Events",
|
||||||
"color": "#96d98D",
|
|
||||||
"points": true,
|
|
||||||
"yaxis": 2,
|
|
||||||
"zindex": -3,
|
|
||||||
"$$hashKey": "object:56"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"$$hashKey": "object:329",
|
|
||||||
"color": "#B877D9",
|
"color": "#B877D9",
|
||||||
"alias": "All events being persisted",
|
"hideTooltip": true,
|
||||||
"points": true,
|
"points": true,
|
||||||
"yaxis": 2,
|
"yaxis": 2,
|
||||||
"zindex": -3
|
"zindex": -3
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"spaceLength": 10,
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
"targets": [
|
"targets": [
|
||||||
{
|
{
|
||||||
"datasource": {
|
"datasource": {
|
||||||
@@ -389,20 +384,7 @@
|
|||||||
},
|
},
|
||||||
"expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
|
"expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
|
||||||
"legendFormat": "Average",
|
"legendFormat": "Average",
|
||||||
"refId": "H",
|
"refId": "H"
|
||||||
"editorMode": "code",
|
|
||||||
"range": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"datasource": {
|
|
||||||
"uid": "${DS_PROMETHEUS}"
|
|
||||||
},
|
|
||||||
"expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
|
|
||||||
"hide": false,
|
|
||||||
"instant": false,
|
|
||||||
"legendFormat": "Local events being persisted",
|
|
||||||
"refId": "E",
|
|
||||||
"editorMode": "code"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"datasource": {
|
"datasource": {
|
||||||
@@ -411,9 +393,8 @@
|
|||||||
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
|
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
|
||||||
"hide": false,
|
"hide": false,
|
||||||
"instant": false,
|
"instant": false,
|
||||||
"legendFormat": "All events being persisted",
|
"legendFormat": "Events",
|
||||||
"refId": "I",
|
"refId": "E"
|
||||||
"editorMode": "code"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"thresholds": [
|
"thresholds": [
|
||||||
@@ -447,9 +428,7 @@
|
|||||||
"xaxis": {
|
"xaxis": {
|
||||||
"mode": "time",
|
"mode": "time",
|
||||||
"show": true,
|
"show": true,
|
||||||
"values": [],
|
"values": []
|
||||||
"name": null,
|
|
||||||
"buckets": null
|
|
||||||
},
|
},
|
||||||
"yaxes": [
|
"yaxes": [
|
||||||
{
|
{
|
||||||
@@ -471,20 +450,7 @@
|
|||||||
],
|
],
|
||||||
"yaxis": {
|
"yaxis": {
|
||||||
"align": false
|
"align": false
|
||||||
},
|
}
|
||||||
"bars": false,
|
|
||||||
"dashes": false,
|
|
||||||
"description": "",
|
|
||||||
"fill": 0,
|
|
||||||
"fillGradient": 0,
|
|
||||||
"hiddenSeries": false,
|
|
||||||
"linewidth": 0,
|
|
||||||
"percentage": false,
|
|
||||||
"points": false,
|
|
||||||
"stack": false,
|
|
||||||
"steppedLine": false,
|
|
||||||
"timeFrom": null,
|
|
||||||
"timeShift": null
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"aliasColors": {},
|
"aliasColors": {},
|
||||||
@@ -2166,10 +2132,10 @@
|
|||||||
"datasource": {
|
"datasource": {
|
||||||
"uid": "${DS_PROMETHEUS}"
|
"uid": "${DS_PROMETHEUS}"
|
||||||
},
|
},
|
||||||
"expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
|
"expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
|
||||||
"format": "time_series",
|
"format": "time_series",
|
||||||
"intervalFactor": 2,
|
"intervalFactor": 2,
|
||||||
"legendFormat": "{{origin_type}}",
|
"legendFormat": "{{type}}",
|
||||||
"refId": "D"
|
"refId": "D"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@@ -2254,7 +2220,7 @@
|
|||||||
"datasource": {
|
"datasource": {
|
||||||
"uid": "${DS_PROMETHEUS}"
|
"uid": "${DS_PROMETHEUS}"
|
||||||
},
|
},
|
||||||
"expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
|
"expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||||
"format": "time_series",
|
"format": "time_series",
|
||||||
"instant": false,
|
"instant": false,
|
||||||
"intervalFactor": 2,
|
"intervalFactor": 2,
|
||||||
@@ -2294,6 +2260,99 @@
|
|||||||
"align": false
|
"align": false
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {
|
||||||
|
"irc-freenode (local)": "#EAB839"
|
||||||
|
},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": {
|
||||||
|
"uid": "${DS_PROMETHEUS}"
|
||||||
|
},
|
||||||
|
"decimals": 1,
|
||||||
|
"fill": 1,
|
||||||
|
"fillGradient": 0,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 7,
|
||||||
|
"w": 12,
|
||||||
|
"x": 0,
|
||||||
|
"y": 44
|
||||||
|
},
|
||||||
|
"hiddenSeries": false,
|
||||||
|
"id": 44,
|
||||||
|
"legend": {
|
||||||
|
"alignAsTable": true,
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"hideEmpty": true,
|
||||||
|
"hideZero": true,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"links": [],
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"options": {
|
||||||
|
"alertThreshold": true
|
||||||
|
},
|
||||||
|
"percentage": false,
|
||||||
|
"pluginVersion": "9.2.2",
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"datasource": {
|
||||||
|
"uid": "${DS_PROMETHEUS}"
|
||||||
|
},
|
||||||
|
"expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 2,
|
||||||
|
"legendFormat": "{{origin_entity}} ({{origin_type}})",
|
||||||
|
"refId": "A",
|
||||||
|
"step": 20
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeRegions": [],
|
||||||
|
"title": "Events/s by Origin",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": false,
|
||||||
|
"sort": 2,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"mode": "time",
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "hertz",
|
||||||
|
"logBase": 1,
|
||||||
|
"min": "0",
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"logBase": 1,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false
|
||||||
|
}
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"aliasColors": {},
|
"aliasColors": {},
|
||||||
"bars": false,
|
"bars": false,
|
||||||
@@ -4303,7 +4362,7 @@
|
|||||||
"exemplar": false,
|
"exemplar": false,
|
||||||
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
|
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
|
||||||
"instant": false,
|
"instant": false,
|
||||||
"legendFormat": "{{origin_server_name}} ",
|
"legendFormat": "{{server_name}} ",
|
||||||
"range": true,
|
"range": true,
|
||||||
"refId": "A"
|
"refId": "A"
|
||||||
}
|
}
|
||||||
@@ -4425,7 +4484,7 @@
|
|||||||
"exemplar": false,
|
"exemplar": false,
|
||||||
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
|
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
|
||||||
"instant": false,
|
"instant": false,
|
||||||
"legendFormat": "{{destination_server_name}}",
|
"legendFormat": "{{server_name}}",
|
||||||
"range": true,
|
"range": true,
|
||||||
"refId": "A"
|
"refId": "A"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,10 +20,11 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import cgi
|
||||||
import datetime
|
import datetime
|
||||||
import html
|
|
||||||
import json
|
import json
|
||||||
import urllib.request
|
import urllib.request
|
||||||
|
from typing import List
|
||||||
|
|
||||||
import pydot
|
import pydot
|
||||||
|
|
||||||
@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
|
|||||||
return f"{pdu_id}@{origin}"
|
return f"{pdu_id}@{origin}"
|
||||||
|
|
||||||
|
|
||||||
def make_graph(pdus: list[dict], filename_prefix: str) -> None:
|
def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
||||||
"""
|
"""
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
Generate a dot and SVG file for a graph of events in the room based on the
|
||||||
topological ordering by querying a homeserver.
|
topological ordering by querying a homeserver.
|
||||||
@@ -44,10 +45,6 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
|
|||||||
colors = {"red", "green", "blue", "yellow", "purple"}
|
colors = {"red", "green", "blue", "yellow", "purple"}
|
||||||
|
|
||||||
for pdu in pdus:
|
for pdu in pdus:
|
||||||
# TODO: The "origin" field has since been removed from events generated
|
|
||||||
# by Synapse. We should consider removing it here as well but since this
|
|
||||||
# is part of `contrib/`, it is left for the community to revise and ensure things
|
|
||||||
# still work correctly.
|
|
||||||
origins.add(pdu.get("origin"))
|
origins.add(pdu.get("origin"))
|
||||||
|
|
||||||
color_map = {color: color for color in colors if color in origins}
|
color_map = {color: color for color in colors if color in origins}
|
||||||
@@ -88,7 +85,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
|
|||||||
"name": name,
|
"name": name,
|
||||||
"type": pdu.get("pdu_type"),
|
"type": pdu.get("pdu_type"),
|
||||||
"state_key": pdu.get("state_key"),
|
"state_key": pdu.get("state_key"),
|
||||||
"content": html.escape(json.dumps(pdu.get("content")), quote=True),
|
"content": cgi.escape(json.dumps(pdu.get("content")), quote=True),
|
||||||
"time": t,
|
"time": t,
|
||||||
"depth": pdu.get("depth"),
|
"depth": pdu.get("depth"),
|
||||||
}
|
}
|
||||||
@@ -126,7 +123,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
|
|||||||
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
||||||
|
|
||||||
|
|
||||||
def get_pdus(host: str, room: str) -> list[dict]:
|
def get_pdus(host: str, room: str) -> List[dict]:
|
||||||
transaction = json.loads(
|
transaction = json.loads(
|
||||||
urllib.request.urlopen(
|
urllib.request.urlopen(
|
||||||
f"http://{host}/_matrix/federation/v1/context/{room}/"
|
f"http://{host}/_matrix/federation/v1/context/{room}/"
|
||||||
|
|||||||
@@ -44,3 +44,31 @@ groups:
|
|||||||
###
|
###
|
||||||
### End of 'Prometheus Console Only' rules block
|
### End of 'Prometheus Console Only' rules block
|
||||||
###
|
###
|
||||||
|
|
||||||
|
|
||||||
|
###
|
||||||
|
### Grafana Only
|
||||||
|
### The following rules are only needed if you use the Grafana dashboard
|
||||||
|
### in contrib/grafana/synapse.json
|
||||||
|
###
|
||||||
|
- record: synapse_storage_events_persisted_by_source_type
|
||||||
|
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
|
||||||
|
labels:
|
||||||
|
type: remote
|
||||||
|
- record: synapse_storage_events_persisted_by_source_type
|
||||||
|
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
|
||||||
|
labels:
|
||||||
|
type: local
|
||||||
|
- record: synapse_storage_events_persisted_by_source_type
|
||||||
|
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
|
||||||
|
labels:
|
||||||
|
type: bridges
|
||||||
|
|
||||||
|
- record: synapse_storage_events_persisted_by_event_type
|
||||||
|
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
|
||||||
|
|
||||||
|
- record: synapse_storage_events_persisted_by_origin
|
||||||
|
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
|
||||||
|
###
|
||||||
|
### End of 'Grafana Only' rules block
|
||||||
|
###
|
||||||
|
|||||||
2
debian/build_virtualenv
vendored
2
debian/build_virtualenv
vendored
@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
|
|||||||
python3 -m venv "$TEMP_VENV"
|
python3 -m venv "$TEMP_VENV"
|
||||||
source "$TEMP_VENV/bin/activate"
|
source "$TEMP_VENV/bin/activate"
|
||||||
pip install -U pip
|
pip install -U pip
|
||||||
pip install poetry==2.1.1 poetry-plugin-export==1.9.0
|
pip install poetry==1.3.2
|
||||||
poetry export \
|
poetry export \
|
||||||
--extras all \
|
--extras all \
|
||||||
--extras test \
|
--extras test \
|
||||||
|
|||||||
632
debian/changelog
vendored
632
debian/changelog
vendored
@@ -1,636 +1,8 @@
-matrix-synapse-py3 (1.144.0) stable; urgency=medium
-
-  * New Synapse release 1.144.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Dec 2025 08:30:40 -0700
-
-matrix-synapse-py3 (1.144.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.144.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Dec 2025 09:11:19 -0700
-
-matrix-synapse-py3 (1.143.0) stable; urgency=medium
-
-  * New Synapse release 1.143.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Nov 2025 08:44:56 -0700
-
-matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.143.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 17:36:08 -0700
-
-matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.143.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 13:08:39 -0700
-
-matrix-synapse-py3 (1.142.1) stable; urgency=medium
-
-  * New Synapse release 1.142.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 12:25:23 -0700
-
-matrix-synapse-py3 (1.142.0) stable; urgency=medium
-
-  * New Synapse release 1.142.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Nov 2025 09:45:51 +0000
-
-matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc4.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Nov 2025 10:54:42 +0000
-
-matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 17:39:11 +0000
-
-matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 16:21:30 +0000
-
-matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 13:20:15 +0000
-
-matrix-synapse-py3 (1.141.0) stable; urgency=medium
-
-  * New Synapse release 1.141.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 29 Oct 2025 11:01:43 +0000
-
-matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.141.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Oct 2025 10:20:26 +0000
-
-matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.141.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100
-
-matrix-synapse-py3 (1.140.0) stable; urgency=medium
-
-  * New Synapse release 1.140.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100
-
-matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.140.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100
-
-matrix-synapse-py3 (1.139.2) stable; urgency=medium
-
-  * New Synapse release 1.139.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100
-
-matrix-synapse-py3 (1.139.1) stable; urgency=medium
-
-  * New Synapse release 1.139.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
-
-matrix-synapse-py3 (1.138.4) stable; urgency=medium
-
-  * New Synapse release 1.138.4.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100
-
-matrix-synapse-py3 (1.138.3) stable; urgency=medium
-
-  * New Synapse release 1.138.3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100
-
-matrix-synapse-py3 (1.139.0) stable; urgency=medium
-
-  * New Synapse release 1.139.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
-
-matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
-
-matrix-synapse-py3 (1.138.2) stable; urgency=medium
-
-  * The licensing specifier has been updated to add an optional
-    `LicenseRef-Element-Commercial` license. The code was already licensed in
-    this manner - the debian metadata was just not updated to reflect it.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100
-
-matrix-synapse-py3 (1.138.1) stable; urgency=medium
-
-  * New Synapse release 1.138.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
-
-matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
-
-matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
-
-matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.138.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Sep 2025 12:16:14 +0000
-
-matrix-synapse-py3 (1.137.0) stable; urgency=medium
-
-  * New Synapse release 1.137.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Aug 2025 10:23:41 +0100
-
-matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.137.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Aug 2025 10:55:22 +0100
-
-matrix-synapse-py3 (1.136.0) stable; urgency=medium
-
-  * New Synapse release 1.136.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Aug 2025 13:18:03 +0100
-
-matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 12:18:52 -0600
-
-matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Aug 2025 08:13:30 -0600
-
-matrix-synapse-py3 (1.135.2) stable; urgency=medium
-
-  * New Synapse release 1.135.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:52:01 -0600
-
-matrix-synapse-py3 (1.135.1) stable; urgency=medium
-
-  * New Synapse release 1.135.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:13:15 -0600
-
-matrix-synapse-py3 (1.135.0) stable; urgency=medium
-
-  * New Synapse release 1.135.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Aug 2025 13:12:28 +0100
-
-matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.135.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jul 2025 12:19:14 +0100
-
-matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.135.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Jul 2025 12:08:37 +0100
-
-matrix-synapse-py3 (1.134.0) stable; urgency=medium
-
-  * New Synapse release 1.134.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jul 2025 14:22:50 +0100
-
-matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.134.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Jul 2025 11:27:13 +0100
-
-matrix-synapse-py3 (1.133.0) stable; urgency=medium
-
-  * New synapse release 1.133.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jul 2025 13:13:24 +0000
-
-matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.133.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Jun 2025 11:57:47 +0100
-
-matrix-synapse-py3 (1.132.0) stable; urgency=medium
-
-  * New Synapse release 1.132.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Jun 2025 13:16:20 +0100
-
-matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.132.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Jun 2025 11:15:18 +0100
-
-matrix-synapse-py3 (1.131.0) stable; urgency=medium
-
-  * New Synapse release 1.131.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Jun 2025 14:36:55 +0100
-
-matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.131.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 28 May 2025 10:25:44 +0000
-
-matrix-synapse-py3 (1.130.0) stable; urgency=medium
-
-  * New Synapse release 1.130.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 May 2025 08:34:13 -0600
-
-matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.130.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 May 2025 10:44:04 +0100
-
-matrix-synapse-py3 (1.129.0) stable; urgency=medium
-
-  * New Synapse release 1.129.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 May 2025 12:22:11 +0100
-
-matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.129.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Apr 2025 13:13:16 +0000
-
-matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.129.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Apr 2025 10:47:43 -0600
-
-matrix-synapse-py3 (1.128.0) stable; urgency=medium
-
-  * New Synapse release 1.128.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Apr 2025 14:09:54 +0100
-
-matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
-
-  * Update Poetry to 2.1.1.
-  * New synapse release 1.128.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Apr 2025 14:35:33 +0000
-
-matrix-synapse-py3 (1.127.1) stable; urgency=medium
-
-  * New Synapse release 1.127.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Mar 2025 21:07:31 +0000
-
-matrix-synapse-py3 (1.127.0) stable; urgency=medium
-
-  * New Synapse release 1.127.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Mar 2025 12:04:15 +0000
-
-matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.127.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Mar 2025 13:30:05 +0000
-
-matrix-synapse-py3 (1.126.0) stable; urgency=medium
-
-  * New Synapse release 1.126.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Mar 2025 13:11:29 +0000
-
-matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Mar 2025 15:45:05 +0000
-
-matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Mar 2025 14:29:12 +0000
-
-matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Mar 2025 13:11:51 +0000
-
-matrix-synapse-py3 (1.125.0) stable; urgency=medium
-
-  * New Synapse release 1.125.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Feb 2025 08:10:07 -0700
-
-matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.125.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Feb 2025 13:32:49 +0000
-
-matrix-synapse-py3 (1.124.0) stable; urgency=medium
-
-  * New Synapse release 1.124.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Feb 2025 11:55:22 +0100
-
-matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Feb 2025 13:42:55 +0000
-
-matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Feb 2025 16:35:53 +0000
-
-matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Feb 2025 11:53:05 +0000
-
-matrix-synapse-py3 (1.123.0) stable; urgency=medium
-
-  * New Synapse release 1.123.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Jan 2025 08:37:34 -0700
-
-matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.123.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Jan 2025 14:39:57 +0100
-
-matrix-synapse-py3 (1.122.0) stable; urgency=medium
-
-  * New Synapse release 1.122.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Jan 2025 14:14:14 +0000
-
-matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.122.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Jan 2025 14:06:19 +0000
-
-matrix-synapse-py3 (1.121.1) stable; urgency=medium
-
-  * New Synapse release 1.121.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 11 Dec 2024 18:24:48 +0000
-
-matrix-synapse-py3 (1.121.0) stable; urgency=medium
-
-  * New Synapse release 1.121.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 11 Dec 2024 13:12:30 +0100
-
-matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.121.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 04 Dec 2024 14:47:23 +0000
-
-matrix-synapse-py3 (1.120.2) stable; urgency=medium
-
-  * New synapse release 1.120.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Dec 2024 15:43:37 +0000
-
-matrix-synapse-py3 (1.120.1) stable; urgency=medium
-
-  * New synapse release 1.120.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Dec 2024 09:07:57 +0000
-
-matrix-synapse-py3 (1.120.0) stable; urgency=medium
-
-  * New synapse release 1.120.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Nov 2024 13:10:23 +0000
-
-matrix-synapse-py3 (1.120.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.120.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Nov 2024 15:02:21 +0000
-
-matrix-synapse-py3 (1.119.0) stable; urgency=medium
-
-  * New Synapse release 1.119.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 13 Nov 2024 13:57:51 +0000
-
-matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.119.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Nov 2024 14:33:02 +0000
-
-matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.119.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 06 Nov 2024 08:59:43 -0700
-
-matrix-synapse-py3 (1.118.0) stable; urgency=medium
-
-  * New Synapse release 1.118.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Oct 2024 15:29:53 +0100
-
-matrix-synapse-py3 (1.118.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.118.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Oct 2024 11:48:14 +0100
-
-matrix-synapse-py3 (1.117.0) stable; urgency=medium
-
-  * New Synapse release 1.117.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Oct 2024 10:46:30 +0100
-
-matrix-synapse-py3 (1.117.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.117.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Oct 2024 14:37:11 +0100
-
-matrix-synapse-py3 (1.116.0) stable; urgency=medium
-
-  * New Synapse release 1.116.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Oct 2024 11:14:07 +0100
-
-matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.116.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 26 Sep 2024 13:28:43 +0000
-
-matrix-synapse-py3 (1.116.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.116.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 25 Sep 2024 09:34:07 +0000
-
-matrix-synapse-py3 (1.115.0) stable; urgency=medium
-
-  * New Synapse release 1.115.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Sep 2024 14:32:10 +0100
-
-matrix-synapse-py3 (1.115.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.115.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 12 Sep 2024 11:10:15 +0100
-
-matrix-synapse-py3 (1.115.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.115.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Sep 2024 08:39:09 -0600
-
-matrix-synapse-py3 (1.114.0) stable; urgency=medium
-
-  * New Synapse release 1.114.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 02 Sep 2024 15:14:53 +0100
-
-matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.114.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 16:38:05 +0100
-
-matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.114.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 15:35:13 +0100
-
-matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.114.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Aug 2024 12:55:28 +0000
-
-matrix-synapse-py3 (1.113.0) stable; urgency=medium
-
-  * New Synapse release 1.113.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Aug 2024 14:36:56 +0100
-
-matrix-synapse-py3 (1.113.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.113.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Aug 2024 12:23:23 +0100
-
-matrix-synapse-py3 (1.112.0) stable; urgency=medium
-
-  * New Synapse release 1.112.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Jul 2024 17:15:48 +0100
-
-matrix-synapse-py3 (1.112.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.112.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Jul 2024 08:58:55 -0600
-
-matrix-synapse-py3 (1.111.1) stable; urgency=medium
-
-  * New Synapse release 1.111.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Jul 2024 16:13:52 +0100
-
-matrix-synapse-py3 (1.111.0) stable; urgency=medium
-
-  * New Synapse release 1.111.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Jul 2024 12:42:46 +0200
-
-matrix-synapse-py3 (1.111.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.111.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 10 Jul 2024 08:46:54 +0000
-
-matrix-synapse-py3 (1.111.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.111.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Jul 2024 09:49:25 +0000
-
-matrix-synapse-py3 (1.110.0) stable; urgency=medium
-
-  * New Synapse release 1.110.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 03 Jul 2024 09:08:59 -0600
-
-matrix-synapse-py3 (1.110.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.110.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Jul 2024 08:28:56 -0600
-
-matrix-synapse-py3 (1.110.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.110.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Jun 2024 18:14:48 +0200
-
-matrix-synapse-py3 (1.110.0~rc1) stable; urgency=medium
+matrix-synapse-py3 (1.109.0+nmu1) UNRELEASED; urgency=medium
 
   * `register_new_matrix_user` now supports a --password-file and a --exists-ok flag.
-  * New Synapse release 1.110.0rc1.
 
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Jun 2024 14:07:56 +0200
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jun 2024 13:29:36 +0100
 
 matrix-synapse-py3 (1.109.0) stable; urgency=medium
Some files were not shown because too many files have changed in this diff.
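The only entry kept on the `+` side of the changelog diff above notes that `register_new_matrix_user` gained `--password-file` and `--exists-ok` flags. A hedged usage sketch follows; only those two flags come from the changelog entry itself, and the config path, username, secrets path, and homeserver URL are placeholders.

# Sketch only: register a user non-interactively, reading the password from a
# file and tolerating the case where the user already exists.
# All paths and the URL below are placeholders.
register_new_matrix_user \
    -c /etc/matrix-synapse/homeserver.yaml \
    -u alice \
    --password-file /run/secrets/alice_password \
    --exists-ok \
    http://localhost:8008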