Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-15 02:00:21 +00:00)

Compare commits: v1.143.0rc...dkasak/par (6 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 1b599ccb73 |  |
|  | 0c67c41b7b |  |
|  | 47a5799b58 |  |
|  | a5f61b044b |  |
|  | fd8c9aa1e2 |  |
|  | d43f6df650 |  |
@@ -1,10 +0,0 @@
-#!/bin/sh
-set -xeu
-
-# On 32-bit Linux platforms, we need libatomic1 to use rustup
-if command -v yum &> /dev/null; then
-    yum install -y libatomic
-fi
-
-# Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
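This deleted helper installed a pinned Rust toolchain inside the build container. A standalone smoke test of the same install sequence looks like this (a sketch, not part of the repository; the version-check step is an addition):

    # Install the pinned toolchain, then confirm it is the one on PATH.
    curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
    . "$HOME/.cargo/env"
    rustc --version   # should report rustc 1.82.0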
@@ -25,6 +25,7 @@
 import argparse
 import os
 import subprocess
+from typing import Optional
 from zipfile import ZipFile
 
 from packaging.tags import Tag
@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
     return new_wheel_file
 
 
-def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
+def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
     """Entry point"""
 
     # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
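The swap from `str | None` back to `Optional[str]` is not cosmetic: PEP 604 union syntax in an annotation is evaluated when the `def` executes and only works on Python 3.10+, while `Optional[str]` runs on the 3.8 this branch still targets. A quick illustration (assumes both interpreters are installed; not from the repository):

    python3.10 -c 'def f(archs: str | None) -> None: ...'   # accepted
    python3.8  -c 'def f(archs: str | None) -> None: ...'   # TypeError: unsupported operand type(s) for |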
@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest and newest Python
-# version that's supported. The oldest version ensures we don't accidentally
-# introduce syntax or code that's too new, and the newest ensures we don't use
-# code that's been dropped in the latest supported Python version.
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.8 right now)
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "sqlite",
-        "extras": "all",
-    },
+    }
 ]
 
 if not IS_PR:
-    # Otherwise, check all supported Python versions.
-    #
-    # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
-        for version in ("3.11", "3.12", "3.13")
+        for version in ("3.9", "3.10", "3.11", "3.12")
    )
 
-# Only test postgres against the earliest and latest Python versions that we
-# support in order to save on CI time.
 trial_postgres_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "postgres",
-        "postgres-version": "14",
+        "postgres-version": "11",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "postgres",
-        "postgres-version": "17",
-        "extras": "all",
-    },
+    }
 ]
 
-# Ensure that Synapse passes unit tests even with no extra dependencies installed.
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.12",
+            "database": "postgres",
+            "postgres-version": "16",
+            "extras": "all",
+        }
+    )
 
 trial_no_extra_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "",
     }
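As the next hunk's context line shows, these lists are handed to GitHub Actions via `set_output("trial_test_matrix", test_matrix)`. At the shell level that amounts to appending a `key=value` line to the file named by `$GITHUB_OUTPUT`; a sketch with an illustrative one-entry matrix (the real script serialises the full job list to JSON):

    test_matrix='[{"python-version": "3.8", "database": "sqlite", "extras": "all"}]'
    echo "trial_test_matrix=${test_matrix}" >> "$GITHUB_OUTPUT"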
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bookworm on PRs
+# For each type of test we only run on focal on PRs
 
 
 sytest_tests = [
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "multi-postgres",
         "workers": "workers",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "multi-postgres",
         "workers": "workers",
         "reactor": "asyncio",
@@ -136,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
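The `sytest-tag` values name Docker image tags for the sytest integration-test images. Running one of them against a local checkout looks roughly like this (a sketch; the mount points follow the upstream sytest documentation and the paths are illustrative):

    docker run --rm -it \
        -v /path/to/synapse:/src:ro \
        -v /path/to/logs:/logs \
        matrixdotorg/sytest-synapse:focal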
@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
 
 try:
     lock_version = lockfile["metadata"]["lock-version"]
-    assert lock_version == "2.1"
+    assert lock_version == "2.0"
 except Exception:
     print(
         """\
-Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
-and re-run `poetry lock`. See the Poetry cheat sheet at
+Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
+and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
 https://element-hq.github.io/synapse/develop/development/dependencies.html
 """
     )
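The `lock-version` this script asserts on lives in the `[metadata]` table of `poetry.lock`, so the same check can be made by eye (the expected value depends on which side of this diff you are on):

    grep 'lock-version' poetry.lock   # "2.1" on the v1.143.0rc side, "2.0" on dkasak/par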
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# this script is run by GitHub Actions in a plain `jammy` container; it
+# this script is run by GitHub Actions in a plain `focal` container; it
 # - installs the minimal system requirements, and poetry;
 # - patches the project definition file to refer to old versions only;
 # - creates a venv with these old versions using poetry; and finally
@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
 # to select the lowest possible versions, rather than resorting to this sed script.
 
 # Patch the project definitions in-place:
-# - `-E` use extended regex syntax.
-# - Don't modify the line that defines required Python versions.
-# - Replace all lower and tilde bounds with exact bounds.
-# - Replace all caret bounds with exact bounds.
-# - Delete all lines referring to psycopg2 - so no testing of postgres support.
+# - Replace all lower and tilde bounds with exact bounds
+# - Replace all caret bounds---but not the one that defines the supported Python version!
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
 # - Use pyopenssl 17.0, which is the oldest version that works with
 #   a `cryptography` compiled against OpenSSL 1.1.
 # - Omit systemd: we're not logging to journal here.
 
-sed -i -E '
-    /^\s*requires-python\s*=/b
-    s/[~>]=/==/g
-    s/\^/==/g
-    /psycopg2/d
-    s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
-    /systemd/d
-' pyproject.toml
+sed -i \
+   -e "s/[~>]=/==/g" \
+   -e '/^python = "^/!s/\^/==/g' \
+   -e "/psycopg2/d" \
+   -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+   -e '/systemd/d' \
+   pyproject.toml
 
 echo "::group::Patched pyproject.toml"
 cat pyproject.toml
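Both variants rewrite version bounds into exact pins. The effect of the bound-rewriting expressions on sample dependency lines (inputs illustrative, not taken from pyproject.toml):

    echo 'attrs = ">=19.2.0"'   | sed -E 's/[~>]=/==/g; s/\^/==/g'   # -> attrs = "==19.2.0"
    echo 'jsonschema = "^4.0"'  | sed -E 's/[~>]=/==/g; s/\^/==/g'   # -> jsonschema = "==4.0"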
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
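The comparison relies on `diff` exiting 0 when the dumps match and 1 when they differ. Note that piping into `tee` masks diff's exit status unless `pipefail` is in effect; an explicit version of the same check (a sketch, not from the script):

    if ! diff -u unported.sql ported.sql > schema_diff; then
        echo "Ported and unported schemas differ; see schema_diff"
        exit 1
    fi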
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# 1) Resolve project ID.
-PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
-
-# 2) Find existing item (project card) for this issue.
-ITEM_ID=$(
-  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
-    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
-)
-
-# 3) If one doesn't exist, add this issue to the project.
-if [ -z "${ITEM_ID:-}" ]; then
-  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
-fi
-
-# 4) Get Status field id + the option id for TARGET_STATUS.
-FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
-STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
-STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
-OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
-
-if [ -z "${OPTION_ID:-}" ]; then
-  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
-fi
-
-# 5) Set Status (moves item to the matching column in the board view).
-gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
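This deleted helper was driven entirely by environment variables (`PROJECT_OWNER`, `PROJECT_NUMBER`, `ISSUE_URL`, `TARGET_STATUS`). An invocation would have looked roughly like this (the script name and all values are hypothetical; the extraction does not preserve the filename):

    PROJECT_OWNER=element-hq PROJECT_NUMBER=123 \
    ISSUE_URL=https://github.com/element-hq/synapse/issues/12345 \
    TARGET_STATUS="Triaged" \
        ./set_project_status.sh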
@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11
-
-# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
-fc244bb592aa481faf28214a2e2ce3bb4e95d990
-
-# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
-fcac7e0282b074d4bd3414d1c9c181e9701875d9
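The SHA-plus-comment format suggests this hunk is the repository's `.git-blame-ignore-revs` (the filename itself was lost in extraction). Such a file only takes effect once git is pointed at it:

    git config blame.ignoreRevsFile .git-blame-ignore-revs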
.github/ISSUE_TEMPLATE.md (2 changed lines)

@@ -2,4 +2,4 @@
 (using a matrix.org account if necessary). We do not use GitHub issues for
 support.
 
-**If you want to report a security issue** please see https://element.io/security/security-disclosure-policy
+**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
.github/ISSUE_TEMPLATE/BUG_REPORT.yml (2 changed lines)

@@ -7,7 +7,7 @@ body:
       **THIS IS NOT A SUPPORT CHANNEL!**
       **IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
 
-      If you want to report a security issue, please see https://element.io/security/security-disclosure-policy
+      If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
 
       This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.
.github/PULL_REQUEST_TEMPLATE.md (3 changed lines)

@@ -9,4 +9,5 @@
 - End with either a period (.) or an exclamation mark (!).
 - Start with a capital letter.
 - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
+  (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
.github/workflows/docker.yml (148 changed lines)

@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [master, main, develop]
+    branches: [ master, main, develop ]
   workflow_dispatch:
 
 permissions:
@@ -14,24 +14,26 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token
 jobs:
   build:
-    name: Build and push image for ${{ matrix.platform }}
-    runs-on: ${{ matrix.runs_on }}
-    strategy:
-      matrix:
-        include:
-          - platform: linux/amd64
-            runs_on: ubuntu-24.04
-            suffix: linux-amd64
-          - platform: linux/arm64
-            runs_on: ubuntu-24.04-arm
-            suffix: linux-arm64
+    runs-on: ubuntu-latest
     steps:
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
+
+      - name: Inspect builder
+        run: docker buildx inspect
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.5.0
 
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,91 +43,25 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push by digest
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
-          file: "docker/Dockerfile"
-          platforms: ${{ matrix.platform }}
-          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
-
-      - name: Export digest
-        run: |
-          mkdir -p ${{ runner.temp }}/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-      - name: Upload digest
-        uses: actions/upload-artifact@v5
-        with:
-          name: digests-${{ matrix.suffix }}
-          path: ${{ runner.temp }}/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  merge:
-    name: Push merged images to ${{ matrix.repository }}
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        repository:
-          - docker.io/matrixdotorg/synapse
-          - ghcr.io/element-hq/synapse
-
-    needs:
-      - build
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'docker.io') }}
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
-
       - name: Calculate docker image tag
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        id: set-tag
+        uses: docker/metadata-action@master
         with:
-          images: ${{ matrix.repository }}
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/element-hq/synapse
          flavor: |
            latest=false
          tags: |
@@ -133,23 +69,31 @@ jobs:
           type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
           type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
           type=pep440,pattern={{raw}}
-          type=sha
 
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        env:
-          REPOSITORY: ${{ matrix.repository }}
-        run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$REPOSITORY@sha256:%s " *)
+      - name: Build and push all platforms
+        id: build-and-push
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: "${{ steps.set-tag.outputs.tags }}"
+          file: "docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64
 
-      - name: Sign each manifest
+          # arm64 builds OOM without the git fetch setting. c.f.
+          # https://github.com/rust-lang/cargo/issues/10583
+          build-args: |
+            CARGO_NET_GIT_FETCH_WITH_CLI=true
+
+      - name: Sign the images with GitHub OIDC Token
         env:
-          REPOSITORY: ${{ matrix.repository }}
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.set-tag.outputs.tags }}
         run: |
-          DIGESTS=""
-          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
-            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
-            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
           done
-          cosign sign --yes $DIGESTS
+          cosign sign --yes ${images}
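The removed (v1.143.0rc-side) pipeline builds each platform separately, pushes by digest, and then stitches the per-arch digests into one multi-arch manifest list with `docker buildx imagetools create`. Done by hand, that final step looks roughly like this (tag and digests illustrative):

    docker buildx imagetools create \
        -t docker.io/matrixdotorg/synapse:v1.143.0 \
        docker.io/matrixdotorg/synapse@sha256:<amd64-digest> \
        docker.io/matrixdotorg/synapse@sha256:<arm64-digest>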
.github/workflows/docs-pr-netlify.yaml (4 changed lines)

@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
+        uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
           path: book
 
       - name: 📤 Deploy to Netlify
-        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
+        uses: matrix-org/netlify-pr-preview@v3
         with:
           path: book
           owner: ${{ github.event.workflow_run.head_repository.owner.login }}
.github/workflows/docs-pr.yaml (8 changed lines)

@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
 
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
         with:
           name: book
           path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
.github/workflows/docs.yaml (16 changed lines)

@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
 
@@ -78,18 +78,6 @@ jobs:
           mdbook build
           cp book/welcome_and_overview.html book/index.html
 
-      - name: Prepare and publish schema files
-        run: |
-          sudo apt-get update && sudo apt-get install -y yq
-          mkdir -p book/schema
-          # Remove developer notice before publishing.
-          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
-          # Copy schema files that are independent from current Synapse version.
-          cp -r -t book/schema schema/v*/
-          # Convert config schema from YAML source file to JSON.
-          yq < schema/synapse-config.schema.yaml \
-            > book/schema/synapse-config.schema.json
-
       # Deploy to the target directory.
       - name: Deploy to gh pages
         uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
.github/workflows/fix_lint.yaml (36 changed lines)

@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:
 
-env:
-  # We use nightly so that `fmt` correctly groups together imports, and
-  # clippy correctly fixes up the benchmarks.
-  RUST_VERSION: nightly-2025-06-24
-
 jobs:
   fixup:
     name: Fix up
@@ -18,28 +13,33 @@ jobs:
 
    steps:
      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
        with:
-          toolchain: ${{ env.RUST_VERSION }}
-          components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+          # We use nightly so that `fmt` correctly groups together imports, and
+          # clippy correctly fixes up the benchmarks.
+          toolchain: nightly-2022-12-01
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
 
      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
        with:
          install-project: "false"
-          poetry-version: "2.1.1"
 
-      - name: Run ruff check
+      - name: Import order (isort)
        continue-on-error: true
-        run: poetry run ruff check --fix .
+        run: poetry run isort .
 
-      - name: Run ruff format
+      - name: Code style (black)
        continue-on-error: true
-        run: poetry run ruff format --quiet .
+        run: poetry run black .
+
+      - name: Semantic checks (ruff)
+        continue-on-error: true
+        run: poetry run ruff --fix .
 
      - run: cargo clippy --all-features --fix -- -D warnings
        continue-on-error: true
@@ -47,6 +47,6 @@ jobs:
      - run: cargo fmt
        continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "Attempt to fix linting"
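Either generation of this workflow can be reproduced locally; on the dkasak/par side the Python fix-ups are isort, black and ruff, followed by the same two cargo steps the workflow runs:

    poetry run isort . && poetry run black . && poetry run ruff --fix .
    cargo clippy --all-features --fix -- -D warnings
    cargo fmt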
.github/workflows/latest_deps.yml (49 changed lines)

@@ -21,9 +21,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,25 +39,23 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --without dev
+      - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +72,11 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
           -e POSTGRES_PASSWORD=postgres \
           -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
           postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -139,9 +132,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bookworm
+          - sytest-tag: focal
 
-          - sytest-tag: bookworm
+          - sytest-tag: focal
             postgres: postgres
             workers: workers
             redis: redis
@@ -152,13 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -201,15 +192,15 @@ jobs:
           database: Postgres
 
     steps:
-      - name: Check out synapse codebase
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
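The `Set up PostgreSQL` context lines show the server being started as a throwaway container. A standalone equivalent looks roughly like this (only the `-e` flags and image appear in the hunk; the `-d` and port flags are assumptions):

    docker run -d -p 5432:5432 \
        -e POSTGRES_PASSWORD=postgres \
        -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
        postgres:14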
.github/workflows/poetry_lockfile.yaml (4 changed lines)

@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
       - run: pip install tomli
.github/workflows/push_complement_image.yml (10 changed lines)

@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@v5
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
.github/workflows/release-artifacts.yml (117 changed lines)

@@ -27,10 +27,10 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.x"
+          python-version: '3.x'
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -55,18 +55,18 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
        with:
           path: src
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
        with:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@v4
        with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,9 +74,9 @@ jobs:
           ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
        with:
-          python-version: "3.x"
+          python-version: '3.x'
 
       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -91,31 +91,19 @@ jobs:
           rm -rf /tmp/.buildx-cache
           mv /tmp/.buildx-cache-new /tmp/.buildx-cache
 
-      - name: Artifact name
-        id: artifact-name
-        # We can't have colons in the upload name of the artifact, so we convert
-        # e.g. `debian:sid` to `sid`.
-        env:
-          DISTRO: ${{ matrix.distro }}
-        run: |
-          echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
-
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
        with:
-          name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
+          name: debs
           path: debs/*
 
   build-wheels:
-    name: Build wheels on ${{ matrix.os }}
+    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os:
-          - ubuntu-24.04
-          - ubuntu-24.04-arm
-          - macos-14 # This uses arm64
-          - macos-15-intel # This uses x86-64
+        os: [ubuntu-20.04, macos-12]
+        arch: [x86_64, aarch64]
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -124,44 +112,53 @@ jobs:
        exclude:
          # Don't build macos wheels on PR CI.
          - is_pr: true
-            os: "macos-15-intel"
-          - is_pr: true
-            os: "macos-14"
+            os: "macos-12"
+          # Don't build aarch64 wheels on mac.
+          - os: "macos-12"
+            arch: aarch64
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
-            os: "ubuntu-24.04-arm"
+            arch: aarch64
 
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
          python-version: "3.x"
 
      - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.2.1
+        run: python -m pip install cibuildwheel==2.19.1
+
+      - name: Set up QEMU to emulate aarch64
+        if: matrix.arch == 'aarch64'
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
+      - name: Build aarch64 wheels
+        if: matrix.arch == 'aarch64'
+        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
 
      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
 
      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        env:
-          # The platforms that we build for are determined by the
-          # `tool.cibuildwheel.skip` option in `pyproject.toml`.
-          #
-          # We skip testing wheels for the following platforms in CI:
-          #
-          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
-          # musl: (TODO: investigate).
-          CIBW_TEST_SKIP: pp3*-* *musl*
+          # Skip testing for platforms which various libraries don't have wheels
+          # for, and so need extra build deps.
+          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
+          CARGO_NET_GIT_FETCH_WITH_CLI: true
+          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
        with:
-          name: Wheel-${{ matrix.os }}
+          name: Wheel
           path: ./wheelhouse/*.whl
 
   build-sdist:
@@ -170,21 +167,22 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
-          python-version: "3.10"
+          python-version: '3.10'
 
      - run: pip install build
 
      - name: Build sdist
        run: python -m build --sdist
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
        with:
          name: Sdist
          path: dist/*.tar.gz
 
 
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
@@ -196,20 +194,19 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
       - name: Build a tarball for the debs
-        # We need to merge all the debs uploads into one folder, then compress
-        # that.
-        run: |
-          mkdir debs
-          mv debs*/* debs/
-          tar -cvJf debs.tar.xz debs
+        run: tar -cvJf debs.tar.xz debs
       - name: Attach to release
+        uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          gh release upload "${{ github.ref_name }}" \
-            Sdist/* \
-            Wheel*/* \
-            debs.tar.xz \
-            --repo ${{ github.repository }}
+        with:
+          files: |
+            Sdist/*
+            Wheel/*
+            debs.tar.xz
+          # if it's not already published, keep the release as a draft.
+          draft: true
+          # mark it as a prerelease if the tag contains 'rc'.
+          prerelease: ${{ contains(github.ref, 'rc') }}
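The removed `Artifact name` step exists because upload names cannot contain colons; it strips everything up to the first `:` with shell parameter expansion:

    DISTRO=debian:sid
    echo "${DISTRO#*:}"   # -> sid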
.github/workflows/schema.yaml (57 changed lines, file deleted)

@@ -1,57 +0,0 @@
-name: Schema
-
-on:
-  pull_request:
-    paths:
-      - schema/**
-      - docs/usage/configuration/config_documentation.md
-  push:
-    branches: ["develop", "release-*"]
-  workflow_dispatch:
-
-jobs:
-  validate-schema:
-    name: Ensure Synapse config schema is valid
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install check-jsonschema
-        run: pip install check-jsonschema==0.33.0
-
-      - name: Validate meta schema
-        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
-      - name: Validate schema
-        run: |-
-          # Please bump on introduction of a new meta schema.
-          LATEST_META_SCHEMA_VERSION=v1
-          check-jsonschema \
-            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
-            schema/synapse-config.schema.yaml
-      - name: Validate default config
-        # Populates the empty instance with default values and checks against the schema.
-        run: |-
-          echo "{}" | check-jsonschema \
-            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -
-
-  check-doc-generation:
-    name: Ensure generated documentation is up-to-date
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install PyYAML
-        run: pip install PyYAML==6.0.2
-
-      - name: Regenerate config documentation
-        run: |
-          scripts-dev/gen_config_documentation.py \
-            schema/synapse-config.schema.yaml \
-            > docs/usage/configuration/config_documentation.md
-      - name: Error in case of any differences
-        # Errors if there are now any modified files (untracked files are ignored).
-        run: 'git diff --exit-code'

.github/workflows/tests.yml (268 changes)
@@ -11,9 +11,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
@@ -24,9 +21,8 @@ jobs:
       trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
       integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
       linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
-      linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
     steps:
-      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+      - uses: dorny/paths-filter@v3
        id: filter
        # We only check on PRs
        if: startsWith(github.ref, 'refs/pull/')
@@ -77,25 +73,20 @@ jobs:
            - 'poetry.lock'
            - '.github/workflows/tests.yml'

-          linting_readme:
-            - 'README.rst'
-
   check-sampleconfig:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.linting == 'true' }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
          extras: "all"
       - run: poetry run scripts-dev/generate_sample_config.sh --check
       - run: poetry run scripts-dev/config-lint.sh
@@ -106,8 +97,8 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +107,8 @@ jobs:
   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -129,19 +120,22 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4

       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
        with:
-          poetry-version: "2.1.1"
          install-project: "false"

-      - name: Run ruff check
-        run: poetry run ruff check --output-format=github .
+      - name: Import order (isort)
+        run: poetry run isort --check --diff .

-      - name: Run ruff format
-        run: poetry run ruff format --check .
+      - name: Code style (black)
+        run: poetry run black --check --diff .

+      - name: Semantic checks (ruff)
+        # --quiet suppresses the update check.
+        run: poetry run ruff --quiet .
+
   lint-mypy:
     runs-on: ubuntu-latest
@@ -151,16 +145,14 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
        with:
          # We want to make use of type hints in optional dependencies too.
          extras: all
@@ -169,12 +161,11 @@ jobs:
          # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
          # To make CI green, err towards caution and install the project.
          install-project: "true"
-          poetry-version: "2.1.1"

       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@v4
        with:
          path: |
            .mypy_cache
@@ -187,7 +178,7 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

@@ -195,11 +186,11 @@ jobs:
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -207,20 +198,37 @@ jobs:
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.number }}

+  lint-pydantic:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          poetry-version: "1.3.2"
+          extras: "all"
+      - run: poetry run scripts-dev/check_pydantic_models.py
+
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@1.66.0
        with:
          components: clippy
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2

       - run: cargo clippy -- -D warnings

@@ -232,87 +240,35 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
        with:
-          toolchain: nightly-2025-04-23
+          toolchain: nightly-2022-12-01
          components: clippy
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2

       - run: cargo clippy --all-features -- -D warnings

-  lint-rust:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust == 'true' }}
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          # Install like a normal project from source with all optional dependencies
-          extras: all
-          install-project: "true"
-          poetry-version: "2.1.1"
-
-      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
-        # The `::error::` syntax is using GitHub Actions' error annotations, see
-        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
-        run: |
-          if git diff --quiet Cargo.lock; then
-            echo "Cargo.lock is up to date"
-          else
-            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
-            git diff --exit-code Cargo.lock
-            exit 1
-          fi
-
-  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
-  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
        with:
-          # We use nightly so that we can use some unstable options that we use in
-          # `.rustfmt.toml`.
-          toolchain: nightly-2025-04-23
+          # We use nightly so that it correctly groups together imports
+          toolchain: nightly-2022-12-01
          components: rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2

       - run: cargo fmt --check

-  # This is to detect issues with the rst file, which can otherwise cause issues
-  # when uploading packages to PyPi.
-  lint-readme:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting_readme == 'true' }}
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - run: "pip install rstcheck"
-      - run: "rstcheck --report-level=WARNING README.rst"
-
   # Dummy step to gate other tests on without repeating the whole list
   linting-done:
     if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
@@ -321,17 +277,16 @@ jobs:
       - lint-mypy
       - lint-crlf
       - lint-newsfile
+      - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
-      - lint-rust
       - lint-rustfmt
-      - lint-readme
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v2
        with:
          needs: ${{ toJSON(needs) }}

@@ -342,11 +297,10 @@ jobs:
            lint
            lint-mypy
            lint-newsfile
+           lint-pydantic
            lint-clippy
            lint-clippy-nightly
-           lint-rust
            lint-rustfmt
-           lint-readme


   calculate-test-jobs:
@@ -354,8 +308,8 @@ jobs:
     needs: linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - id: get-matrix
@@ -375,7 +329,7 @@ jobs:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
@@ -390,15 +344,13 @@ jobs:
          postgres:${{ matrix.job.postgres-version }}

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.job.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
          extras: ${{ matrix.job.extras }}
       - name: Await PostgreSQL
        if: ${{ matrix.job.postgres-version }}
@@ -429,26 +381,24 @@ jobs:
     needs:
       - linting-done
       - changes
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
       - run: |
          sudo apt-get -qq update
-          sudo apt-get -qq install build-essential libffi-dev python3-dev \
+          sudo apt-get -qq install build-essential libffi-dev python-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.8'

       - name: Prepare old deps
        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -492,17 +442,17 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["pypy-3.10"]
+        python-version: ["pypy-3.8"]
        extras: ["all"]

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       # Install libs necessary for PyPy to build binary wheels for dependencies
       - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
          extras: ${{ matrix.extras }}
       - run: poetry run trial --jobs=2 tests
       - name: Dump logs
@@ -546,15 +496,13 @@ jobs:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

       - name: Run SyTest
        run: /bootstrap.sh synapse
@@ -563,7 +511,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -593,11 +541,11 @@ jobs:
          --health-retries 5

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
          extras: "postgres"
       - run: .ci/scripts/test_export_data_command.sh
        env:
@@ -616,11 +564,11 @@ jobs:
     strategy:
       matrix:
        include:
-          - python-version: "3.10"
-            postgres-version: "14"
+          - python-version: "3.8"
+            postgres-version: "11"

-          - python-version: "3.14"
-            postgres-version: "17"
+          - python-version: "3.11"
+            postgres-version: "15"

     services:
       postgres:
@@ -637,7 +585,7 @@ jobs:
          --health-retries 5

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -648,10 +596,10 @@ jobs:
          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
          sudo apt-get update
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
          extras: "postgres"
       - run: .ci/scripts/test_synapse_port_db.sh
        id: run_tester_script
@@ -661,7 +609,7 @@ jobs:
          PGPASSWORD: postgres
          PGDATABASE: postgres
       - name: "Upload schema differences"
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
        if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
        with:
          name: Schema dumps
@@ -691,21 +639,19 @@ jobs:
        database: Postgres

     steps:
-      - name: Checkout synapse codebase
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
        with:
          path: synapse

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

       - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -728,13 +674,11 @@ jobs:
       - changes

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2

       - run: cargo test

@@ -748,13 +692,13 @@ jobs:
       - changes

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2

       - run: cargo bench --no-run

@@ -773,7 +717,7 @@ jobs:
       - linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v2
        with:
          needs: ${{ toJSON(needs) }}

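
For context on `calculate-test-jobs`: the `- id: get-matrix` step runs a script that emits the trial/sytest job matrices as step outputs, which later jobs consume via `fromJson(...)`. A minimal sketch of how a script typically publishes such a matrix (the matrix contents here are illustrative only, not the real job list):

```python
import json
import os

# Build the list of trial jobs; the real script varies this by event type.
trial_test_matrix = [
    {"python-version": "3.10", "database": "sqlite", "extras": "all"},
]

# GitHub Actions turns lines appended to $GITHUB_OUTPUT into step outputs.
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
    f.write(f"trial_test_matrix={json.dumps(trial_test_matrix)}\n")
```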

.github/workflows/triage-incoming.yml (2 changes)
@@ -6,7 +6,7 @@ on:

 jobs:
   triage:
-    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
+    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
     with:
       project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
       content_id: ${{ github.event.issue.node_id }}

.github/workflows/triage_labelled.yml (49 changes)
@@ -6,26 +6,39 @@ on:

 jobs:
   move_needs_info:
+    name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
-    permissions:
-      contents: read
-    env:
-      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
-      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-      PROJECT_OWNER: matrix-org
-      # Backend issue triage board.
-      # https://github.com/orgs/matrix-org/projects/67/views/1
-      PROJECT_NUMBER: 67
-      ISSUE_URL: ${{ github.event.issue.html_url }}
-      # This field is case-sensitive.
-      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/add-to-project@main
+        id: add_project
        with:
-          # Only clone the script file we care about, instead of the whole repo.
-          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
+          project-url: "https://github.com/orgs/matrix-org/projects/67"
+          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}

-      - name: Ensure issue exists on the board, then set Status
-        run: .ci/scripts/triage_labelled_issue.sh
+      - name: Set status
+        env:
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+        run: |
+          gh api graphql -f query='
+            mutation(
+              $project: ID!
+              $item: ID!
+              $fieldid: ID!
+              $columnid: String!
+            ) {
+              updateProjectV2ItemFieldValue(
+                input: {
+                  projectId: $project
+                  itemId: $item
+                  fieldId: $fieldid
+                  value: {
+                    singleSelectOptionId: $columnid
+                  }
+                }
+              ) {
+                projectV2Item {
+                  id
+                }
+              }
+            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
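
The replaced "Set status" step drives GitHub's Projects v2 GraphQL API through `gh api graphql`. For reference, the same `updateProjectV2ItemFieldValue` mutation can be sent to the GraphQL endpoint directly; this sketch uses `requests`, reusing the IDs from the workflow above and a placeholder item ID:

```python
import os

import requests

MUTATION = """
mutation($project: ID!, $item: ID!, $fieldid: ID!, $columnid: String!) {
  updateProjectV2ItemFieldValue(
    input: {projectId: $project, itemId: $item, fieldId: $fieldid,
            value: {singleSelectOptionId: $columnid}}
  ) { projectV2Item { id } }
}
"""

resp = requests.post(
    "https://api.github.com/graphql",
    headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
    json={
        "query": MUTATION,
        "variables": {
            "project": "PVT_kwDOAIB0Bs4AFDdZ",  # project ID from the workflow
            "item": "PVTI_placeholder",  # normally taken from add-to-project's itemId output
            "fieldid": "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4",  # Status field ID from the workflow
            "columnid": "ba22e43c",  # "Needs info" single-select option ID
        },
    },
    timeout=10,
)
resp.raise_for_status()
```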

.github/workflows/twisted_trunk.yml (49 changes)
@@ -20,9 +20,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -43,19 +40,16 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all"
-          poetry-version: "2.1.1"
       - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
@@ -70,20 +64,17 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all test"
-          poetry-version: "2.1.1"
       - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
@@ -108,22 +99,20 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using bookworm because that's what Debian oldstable is at the time of writing.
+      # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bookworm
+      image: matrixdotorg/sytest-synapse:focal
       volumes:
        - ${{ github.workspace }}:/src

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

       - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -147,7 +136,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -175,14 +164,14 @@ jobs:

     steps:
       - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
        with:
          path: synapse

       - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -192,11 +181,11 @@ jobs:
        run: |
          set -x
          DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
-          pipx install poetry==2.1.1
+          pipx install poetry==1.3.2

          poetry remove -n twisted
          poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
-          poetry lock
+          poetry lock --no-update
        working-directory: synapse

       - run: |
@@ -217,7 +206,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore (1 change)
@@ -47,7 +47,6 @@ __pycache__/
 /.idea/
 /.ropeproject/
 /.vscode/
-/.zed/

 # build products
 !/.coveragerc

@@ -1,6 +1 @@
-# Unstable options are only available on a nightly toolchain and must be opted into
-unstable_features = true
-
-# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
-# https://github.com/rust-lang/rustfmt/issues/5083
 group_imports = "StdExternalCrate"

CHANGES.md (3965 changes): diff suppressed because it is too large

Cargo.lock (1521 changes, generated): diff suppressed because it is too large

@@ -1,6 +0,0 @@
-Licensees holding a valid commercial license with Element may use this
-software in accordance with the terms contained in a written agreement
-between you and Element.
-
-To purchase a commercial license please contact our sales team at
-licensing@element.io

README.rst (90 changes)
@@ -1,35 +1,34 @@
-.. image:: ./docs/element_logo_white_bg.svg
+.. image:: https://github.com/element-hq/product/assets/87339233/7abf477a-5277-47f3-be44-ea44917d8ed7
   :height: 60px

-**Element Synapse - Matrix homeserver implementation**
+===========================================================================================================
+Element Synapse - Matrix homeserver implementation |support| |development| |documentation| |license| |pypi| |python|
+===========================================================================================================

-|support| |development| |documentation| |license| |pypi| |python|
+Synapse is an open source `Matrix <https://matrix.org>`_ homeserver

-Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
 implementation, written and maintained by `Element <https://element.io>`_.
-`Matrix <https://github.com/matrix-org>`__ is the open standard for
-secure and interoperable real-time communications. You can directly run
+`Matrix <https://github.com/matrix-org>`_ is the open standard for
+secure and interoperable real time communications. You can directly run
 and manage the source code in this repository, available under an AGPL
-license (or alternatively under a commercial license from Element).
-There is no support provided by Element unless you have a
-subscription from Element.
+license. There is no support provided from Element unless you have a
+subscription.

-Subscription
-============
+Subscription alternative
+------------------------

-For those that need an enterprise-ready solution, Element
-Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
+Alternatively, for those that need an enterprise-ready solution, Element
+Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
 ESS builds on Synapse to offer a complete Matrix-based backend including the full
 `Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
 giving admins the power to easily manage an organization-wide
 deployment. It includes advanced identity management, auditing,
-moderation and data retention options as well as Long-Term Support and
-SLAs. ESS supports any Matrix-compatible client.
+moderation and data retention options as well as Long Term Support and
+SLAs. ESS can be used to support any Matrix-based frontend client.

 .. contents::

-🛠️ Installation and configuration
-==================================
+🛠️ Installing and configuration
+===============================

 The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
 `Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
@@ -120,7 +119,7 @@ impact to other applications will be minimal.


 🧪 Testing a new installation
-=============================
+============================

 The easiest way to try out your new Synapse installation is by connecting to it
 from a web client.
@@ -133,7 +132,7 @@ connect from a client: see
 An easy way to get started is to login or register via Element at
 https://app.element.io/#/login or https://app.element.io/#/register respectively.
 You will need to change the server you are logging into from ``matrix.org``
-and instead specify a homeserver URL of ``https://<server_name>:8448``
+and instead specify a Homeserver URL of ``https://<server_name>:8448``
 (or just ``https://<server_name>`` if you are using a reverse proxy).
 If you prefer to use another client, refer to our
 `client breakdown <https://matrix.org/ecosystem/clients/>`_.
@@ -159,33 +158,34 @@ it:

 We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
 the public internet. Without it, anyone can freely register accounts on your homeserver.
-This can be exploited by attackers to create spambots targeting the rest of the Matrix
+This can be exploited by attackers to create spambots targetting the rest of the Matrix
 federation.

-Your new Matrix ID will be formed partly from the ``server_name``, and partly
-from a localpart you specify when you create the account in the form of::
+Your new user name will be formed partly from the ``server_name``, and partly
+from a localpart you specify when you create the account. Your name will take
+the form of::

   @localpart:my.domain.name

 (pronounced "at localpart on my dot domain dot name").

 As when logging in, you will need to specify a "Custom server". Specify your
-desired ``localpart`` in the 'Username' box.
+desired ``localpart`` in the 'User name' box.

 🎯 Troubleshooting and support
-==============================
+=============================

 🚀 Professional support
------------------------
+----------------------

 Enterprise quality support for Synapse including SLAs is available as part of an
-`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.
+`Element Server Suite (ESS) <https://element.io/pricing>` subscription.

-If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
-and access the `knowledge base <https://ems-docs.element.io>`_.
+If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`
+and access the `knowledge base <https://ems-docs.element.io>`.

 🤝 Community support
---------------------
+-------------------

 The `Admin FAQ <https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html>`_
 includes tips on dealing with some common problems. For more details, see
@@ -202,16 +202,16 @@ issues for support requests, only for bug reports and feature requests.
 .. _docs: docs

 🪪 Identity Servers
-===================
+==================

 Identity servers have the job of mapping email addresses and other 3rd Party
 IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
 before creating that mapping.

-**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
-Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**
+**They are not where accounts or credentials are stored - these live on home
+servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**

-This process is highly security-sensitive, as there is an obvious risk of spam if it
+This process is very security-sensitive, as there is obvious risk of spam if it
 is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
 term, we hope to create a decentralised system to manage it (`matrix-doc #712
 <https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
@@ -237,9 +237,9 @@ email address.
 We welcome contributions to Synapse from the community!
 The best place to get started is our
 `guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
-This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
+This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
-information for Synapse developers as well as Synapse administrators.

+information for Synapse developers as well as Synapse administrators.
 Developers might be particularly interested in:

 * `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
@@ -249,24 +249,6 @@ Developers might be particularly interested in:
 Alongside all that, join our developer community on Matrix:
 `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!

-Copyright and Licensing
-=======================
-
-| Copyright 2014-2017 OpenMarket Ltd
-| Copyright 2017 Vector Creations Ltd
-| Copyright 2017-2025 New Vector Ltd
-|
-
-This software is dual-licensed by New Vector Ltd (Element). It can be used either:
-
-(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
-
-(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
-
-Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
-
-Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
-
 .. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
    :alt: (get community support in #synapse:matrix.org)
@@ -1,14 +1,12 @@
 # A build script for poetry that adds the rust extension.
 
-import itertools
 import os
-from typing import Any
+from typing import Any, Dict
 
-from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension
 
 
-def build(setup_kwargs: dict[str, Any]) -> None:
+def build(setup_kwargs: Dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
 
@@ -16,27 +14,10 @@ def build(setup_kwargs: dict[str, Any]) -> None:
         target="synapse.synapse_rust",
         path=cargo_toml_path,
         binding=Binding.PyO3,
-        # This flag is a no-op in the latest versions. Instead, we need to
-        # specify this in the `bdist_wheel` config below.
         py_limited_api=True,
-        # We always build in release mode, as we can't distinguish
-        # between using `poetry` in development vs production.
+        # We force always building in release mode, as we can't tell the
+        # difference between using `poetry` in development vs production.
         debug=False,
     )
     setup_kwargs.setdefault("rust_extensions", []).append(extension)
     setup_kwargs["zip_safe"] = False
-
-    # We look up the minimum supported Python version with
-    # `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
-    # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp310 for Python 3.10.
-    py_limited_api: str
-    python_bounds = SpecifierSet(setup_kwargs["python_requires"])
-    for minor_version in itertools.count(start=10):
-        if f"3.{minor_version}.0" in python_bounds:
-            py_limited_api = f"cp3{minor_version}"
-            break
-
-    setup_kwargs.setdefault("options", {}).setdefault("bdist_wheel", {})[
-        "py_limited_api"
-    ] = py_limited_api
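For reference, the tail of the removed hunk is the abi3 tag computation: it scans `python_requires` for the lowest CPython 3.x release the package accepts and renders it as a `py_limited_api` tag for `bdist_wheel`. A minimal standalone sketch of the same idea (the helper name is illustrative; assumes the `packaging` library is installed):

    import itertools

    from packaging.specifiers import SpecifierSet


    def limited_api_tag(python_requires: str) -> str:
        # Lowest CPython 3.x minor version admitted by the bounds, rendered
        # as a py_limited_api tag, e.g. ">=3.10.0,<4.0.0" -> "cp310".
        # Caveat: loops forever if no 3.x release satisfies the specifier.
        bounds = SpecifierSet(python_requires)
        for minor in itertools.count(start=10):
            if f"3.{minor}.0" in bounds:
                return f"cp3{minor}"


    print(limited_api_tag(">=3.10.0,<4.0.0"))  # cp310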
changelog.d/17187.feature (new file)
@@ -0,0 +1 @@
+Add initial implementation of an experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.

changelog.d/17198.misc (new file)
@@ -0,0 +1 @@
+Remove unused `expire_access_token` option in the Synapse Docker config file. Contributed by @AaronDewes.

changelog.d/17254.bugfix (new file)
@@ -0,0 +1 @@
+Fix searching for users with their exact localpart whose ID includes a hyphen.

changelog.d/17256.feature (new file)
@@ -0,0 +1 @@
+Improve ratelimiting in Synapse (#17256).

changelog.d/17265.misc (new file)
@@ -0,0 +1 @@
+Use fully-qualified `PersistedEventPosition` when returning `RoomsForUser` to facilitate proper comparisons and `RoomStreamToken` generation.

changelog.d/17266.misc (new file)
@@ -0,0 +1 @@
+Add debug logging for when room keys are uploaded, including whether they are replacing other room keys.

changelog.d/17270.feature (new file)
@@ -0,0 +1 @@
+Add support for the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) report room API.

changelog.d/17271.misc (new file)
@@ -0,0 +1 @@
+Handle OTK uploads off master.

changelog.d/17272.bugfix (new file)
@@ -0,0 +1 @@
+Fix wrong retention policy being used when filtering events.

changelog.d/17273.misc (new file)
@@ -0,0 +1 @@
+Don't try and resync devices for remote users whose servers are marked as down.

changelog.d/17275.bugfix (new file)
@@ -0,0 +1 @@
+Fix bug where OTKs were not always included in `/sync` response when using workers.

changelog.d/17276.feature (new file)
@@ -0,0 +1 @@
+Filter for public and empty rooms added to Admin-API [List Room API](https://element-hq.github.io/synapse/latest/admin_api/rooms.html#list-room-api).

changelog.d/17277.feature (new file)
@@ -0,0 +1 @@
+Add `is_dm` filtering to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.

changelog.d/17279.misc (new file)
@@ -0,0 +1 @@
+Re-organize Pydantic models and types used in handlers.

changelog.d/17281.feature (new file)
@@ -0,0 +1 @@
+Add `is_encrypted` filtering to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.

changelog.d/17282.feature (new file)
@@ -0,0 +1 @@
+Include user membership in events served to clients, per MSC4115.

changelog.d/17283.bugfix (new file)
@@ -0,0 +1 @@
+Fix a long-standing bug where an invalid 'from' parameter to [`/notifications`](https://spec.matrix.org/v1.10/client-server-api/#get_matrixclientv3notifications) would result in an Internal Server Error.

changelog.d/17284.feature (new file)
@@ -0,0 +1 @@
+Do not require user-interactive authentication for uploading cross-signing keys for the first time, per MSC3967.

changelog.d/17293.feature (new file)
@@ -0,0 +1 @@
+Add `stream_ordering` sort to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.

changelog.d/17294.feature (new file)
@@ -0,0 +1,2 @@
+`register_new_matrix_user` now supports a --password-file flag, which
+is useful for scripting.
changelog.d/17295.bugfix (new file)
@@ -0,0 +1 @@
+Fix edge case in `/sync` returning the wrong state when using sharded event persisters.
changelog.d/17296.feature (new file)
@@ -0,0 +1 @@
+Add support for the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) report room API.

changelog.d/17297.misc (new file)
@@ -0,0 +1 @@
+Bump `mypy` from 1.8.0 to 1.9.0.

changelog.d/17300.misc (new file)
@@ -0,0 +1 @@
+Expose the worker instance that persisted the event on `event.internal_metadata.instance_name`.

changelog.d/17301.bugfix (new file)
@@ -0,0 +1 @@
+Add initial implementation of an experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.

changelog.d/17304.feature (new file)
@@ -0,0 +1,2 @@
+`register_new_matrix_user` now supports a --exists-ok flag to allow registration of users that already exist in the database.
+This is useful for scripts that bootstrap user accounts with initial passwords.

changelog.d/17308.doc (new file)
@@ -0,0 +1 @@
+Add missing quotes for example for `exclude_rooms_from_sync`.

changelog.d/17322.feature (new file)
@@ -0,0 +1 @@
+Add support for via query parameter from MSC415.

changelog.d/17324.misc (new file)
@@ -0,0 +1 @@
+Update the README with Element branding, improve headers and fix the #synapse:matrix.org support room link rendering.

changelog.d/17325.misc (new file)
@@ -0,0 +1 @@
+This is a changelog so tests will run.

changelog.d/17331.misc (new file)
@@ -0,0 +1 @@
+Change path of the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync implementation to `/org.matrix.simplified_msc3575/sync` since our simplified API is slightly incompatible with what's in the current MSC.

changelog.d/17339.misc (new file)
@@ -0,0 +1 @@
+Tidy up `parse_integer` docs and call sites to reflect the fact that they require non-negative integers by default, and bring `parse_integer_from_args` default in alignment. Contributed by Denis Kasak (@dkasak).
@@ -21,8 +21,7 @@
 #
 #
 
-"""Starts a synapse client console."""
-
+""" Starts a synapse client console. """
 import argparse
 import binascii
 import cmd
@@ -33,6 +32,7 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
+from typing import Optional
 
 import urlparse
 from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -244,7 +244,7 @@ class SynapseCmd(cmd.Cmd):
 
         if "flows" not in json_res:
             print("Failed to find any login flows.")
-            return False
+            defer.returnValue(False)
 
         flow = json_res["flows"][0]  # assume first is the one we want.
         if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
@@ -253,8 +253,8 @@ class SynapseCmd(cmd.Cmd):
                 "Unable to login via the command line client. Please visit "
                 "%s to login." % fallback_url
             )
-            return False
-        return True
+            defer.returnValue(False)
+        defer.returnValue(True)
 
     def do_emailrequest(self, line):
         """Requests the association of a third party identifier
@@ -725,7 +725,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: dict | None = None,
+        query_params: Optional[dict] = None,
         alt_text=None,
     ):
         """Runs an HTTP request and pretty prints the output.
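The `return` / `defer.returnValue` churn in this file is a Python-version artifact rather than a behaviour change: on Python 3 a generator decorated with `@defer.inlineCallbacks` may simply `return value`, while `defer.returnValue(value)` was the only way to deliver a result when Python 2 generators could not return values. In this compare the older branch is the `+` side, so the plain returns give way to `defer.returnValue` again. A toy sketch of the equivalence (only `defer.succeed` is used, so no reactor is required):

    from twisted.internet import defer


    @defer.inlineCallbacks
    def modern_style():
        value = yield defer.succeed(41)
        return value + 1  # Python 3 generators may return a value


    @defer.inlineCallbacks
    def legacy_style():
        value = yield defer.succeed(41)
        defer.returnValue(value + 1)  # same result, delivered via an internal exception


    modern_style().addCallback(print)  # prints 42
    legacy_style().addCallback(print)  # prints 42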
@@ -22,6 +22,7 @@
 import json
 import urllib
 from pprint import pformat
+from typing import Optional
 
 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -77,7 +78,7 @@ class TwistedHttpClient(HttpClient):
             url, data, headers_dict={"Content-Type": ["application/json"]}
         )
         body = yield readBody(response)
-        return response.code, body
+        defer.returnValue((response.code, body))
 
     @defer.inlineCallbacks
     def get_json(self, url, args=None):
@@ -87,9 +88,9 @@ class TwistedHttpClient(HttpClient):
             url = "%s?%s" % (url, qs)
         response = yield self._create_get_request(url)
         body = yield readBody(response)
-        return json.loads(body)
+        defer.returnValue(json.loads(body))
 
-    def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
+    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a PUT request"""
         headers_dict = headers_dict or {}
 
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )
 
-    def _create_get_request(self, url, headers_dict: dict | None = None):
+    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a GET request"""
         return self._create_request("GET", url, headers_dict=headers_dict or {})
 
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
         data=None,
         qparams=None,
         jsonreq=True,
-        headers: dict | None = None,
+        headers: Optional[dict] = None,
     ):
         headers = headers or {}
 
@@ -133,11 +134,11 @@ class TwistedHttpClient(HttpClient):
             response = yield self._create_request(method, url)
 
         body = yield readBody(response)
-        return json.loads(body)
+        defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
     def _create_request(
-        self, method, url, producer=None, headers_dict: dict | None = None
+        self, method, url, producer=None, headers_dict: Optional[dict] = None
     ):
         """Creates and sends a request to the given url"""
         headers_dict = headers_dict or {}
@@ -172,7 +173,7 @@ class TwistedHttpClient(HttpClient):
         if self.verbose:
             print("Status %s %s" % (response.code, response.phrase))
             print(pformat(list(response.headers.getAllRawHeaders())))
-        return response
+        defer.returnValue(response)
 
     def sleep(self, seconds):
         d = defer.Deferred()
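The `dict | None` to `Optional[dict]` swaps are likewise about interpreter support, not semantics: PEP 604 union syntax in a signature is evaluated when the function is defined and raises `TypeError` on Python older than 3.10 (unless `from __future__ import annotations` is in effect), whereas `typing.Optional` works everywhere. A minimal illustration:

    from typing import Optional


    def compat(headers_dict: Optional[dict] = None) -> dict:
        # Accepted by every supported Python 3 interpreter.
        return headers_dict or {}


    # Equivalent, but a TypeError at definition time before Python 3.10:
    # def modern(headers_dict: dict | None = None) -> dict:
    #     return headers_dict or {}

    print(compat())  # {}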
@@ -30,6 +30,3 @@ docker-compose up -d
 ### More information
 
 For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
-
-**For a more comprehensive Docker Compose example showcasing a full Matrix 2.0 stack, please see
-https://github.com/element-hq/element-docker-demo**
@@ -51,7 +51,7 @@ services:
       - traefik.http.routers.https-synapse.tls.certResolver=le-ssl
 
   db:
-    image: docker.io/postgres:15-alpine
+    image: docker.io/postgres:12-alpine
     # Change that password, of course!
     environment:
       - POSTGRES_USER=synapse
@@ -8,9 +8,6 @@ All examples and snippets assume that your Synapse service is called `synapse` i
 
 An example Docker Compose file can be found [here](docker-compose.yaml).
 
-**For a more comprehensive Docker Compose example, showcasing a full Matrix 2.0 stack (originally based on this
-docker-compose.yaml), please see https://github.com/element-hq/element-docker-demo**
-
 ## Worker Service Examples in Docker Compose
 
 In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
@@ -220,24 +220,29 @@
         "yBucketBound": "auto"
       },
       {
-        "datasource": {
-          "uid": "${DS_PROMETHEUS}",
-          "type": "prometheus"
-        },
         "aliasColors": {},
+        "bars": false,
         "dashLength": 10,
+        "dashes": false,
+        "datasource": {
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "description": "",
         "fieldConfig": {
           "defaults": {
             "links": []
           },
           "overrides": []
         },
+        "fill": 0,
+        "fillGradient": 0,
         "gridPos": {
           "h": 9,
           "w": 12,
           "x": 12,
           "y": 1
         },
+        "hiddenSeries": false,
         "id": 152,
         "legend": {
           "avg": false,
@@ -250,81 +255,71 @@
           "values": false
         },
         "lines": true,
+        "linewidth": 0,
+        "links": [],
         "nullPointMode": "connected",
         "options": {
           "alertThreshold": true
         },
         "paceLength": 10,
-        "pluginVersion": "10.4.3",
+        "percentage": false,
+        "pluginVersion": "9.2.2",
         "pointradius": 5,
+        "points": false,
         "renderer": "flot",
         "seriesOverrides": [
           {
            "alias": "Avg",
             "fill": 0,
-            "linewidth": 3,
-            "$$hashKey": "object:48"
+            "linewidth": 3
           },
           {
             "alias": "99%",
             "color": "#C4162A",
-            "fillBelowTo": "90%",
-            "$$hashKey": "object:49"
+            "fillBelowTo": "90%"
           },
           {
             "alias": "90%",
             "color": "#FF7383",
-            "fillBelowTo": "75%",
-            "$$hashKey": "object:50"
+            "fillBelowTo": "75%"
           },
           {
             "alias": "75%",
             "color": "#FFEE52",
-            "fillBelowTo": "50%",
-            "$$hashKey": "object:51"
+            "fillBelowTo": "50%"
           },
           {
             "alias": "50%",
             "color": "#73BF69",
-            "fillBelowTo": "25%",
-            "$$hashKey": "object:52"
+            "fillBelowTo": "25%"
          },
           {
             "alias": "25%",
             "color": "#1F60C4",
-            "fillBelowTo": "5%",
-            "$$hashKey": "object:53"
+            "fillBelowTo": "5%"
           },
           {
             "alias": "5%",
-            "lines": false,
-            "$$hashKey": "object:54"
+            "lines": false
           },
           {
             "alias": "Average",
             "color": "rgb(255, 255, 255)",
             "lines": true,
-            "linewidth": 3,
-            "$$hashKey": "object:55"
+            "linewidth": 3
           },
           {
-            "alias": "Local events being persisted",
-            "color": "#96d98D",
-            "points": true,
-            "yaxis": 2,
-            "zindex": -3,
-            "$$hashKey": "object:56"
-          },
-          {
-            "$$hashKey": "object:329",
+            "alias": "Events",
             "color": "#B877D9",
-            "alias": "All events being persisted",
+            "hideTooltip": true,
             "points": true,
             "yaxis": 2,
             "zindex": -3
           }
         ],
         "spaceLength": 10,
+        "stack": false,
+        "steppedLine": false,
         "targets": [
           {
             "datasource": {
@@ -389,20 +384,7 @@
             },
             "expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
             "legendFormat": "Average",
-            "refId": "H",
-            "editorMode": "code",
-            "range": true
-          },
-          {
-            "datasource": {
-              "uid": "${DS_PROMETHEUS}"
-            },
-            "expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
-            "hide": false,
-            "instant": false,
-            "legendFormat": "Local events being persisted",
-            "refId": "E",
-            "editorMode": "code"
+            "refId": "H"
           },
           {
             "datasource": {
@@ -411,9 +393,8 @@
             "expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
             "hide": false,
             "instant": false,
-            "legendFormat": "All events being persisted",
-            "refId": "I",
-            "editorMode": "code"
+            "legendFormat": "Events",
+            "refId": "E"
           }
         ],
         "thresholds": [
@@ -447,9 +428,7 @@
         "xaxis": {
           "mode": "time",
           "show": true,
-          "values": [],
-          "name": null,
-          "buckets": null
+          "values": []
         },
         "yaxes": [
          {
@@ -471,20 +450,7 @@
         ],
         "yaxis": {
           "align": false
-        },
-        "bars": false,
-        "dashes": false,
-        "description": "",
-        "fill": 0,
-        "fillGradient": 0,
-        "hiddenSeries": false,
-        "linewidth": 0,
-        "percentage": false,
-        "points": false,
-        "stack": false,
-        "steppedLine": false,
-        "timeFrom": null,
-        "timeShift": null
+        }
       },
       {
         "aliasColors": {},
@@ -2166,10 +2132,10 @@
           "datasource": {
             "uid": "${DS_PROMETHEUS}"
           },
-          "expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
+          "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
           "format": "time_series",
           "intervalFactor": 2,
-          "legendFormat": "{{origin_type}}",
+          "legendFormat": "{{type}}",
           "refId": "D"
         }
       ],
@@ -2254,7 +2220,7 @@
           "datasource": {
             "uid": "${DS_PROMETHEUS}"
           },
-          "expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
+          "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
           "format": "time_series",
           "instant": false,
           "intervalFactor": 2,
@@ -2294,6 +2260,99 @@
           "align": false
         }
       },
+      {
+        "aliasColors": {
+          "irc-freenode (local)": "#EAB839"
+        },
+        "bars": false,
+        "dashLength": 10,
+        "dashes": false,
+        "datasource": {
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "decimals": 1,
+        "fill": 1,
+        "fillGradient": 0,
+        "gridPos": {
+          "h": 7,
+          "w": 12,
+          "x": 0,
+          "y": 44
+        },
+        "hiddenSeries": false,
+        "id": 44,
+        "legend": {
+          "alignAsTable": true,
+          "avg": false,
+          "current": false,
+          "hideEmpty": true,
+          "hideZero": true,
+          "max": false,
+          "min": false,
+          "show": true,
+          "total": false,
+          "values": false
+        },
+        "lines": true,
+        "linewidth": 1,
+        "links": [],
+        "nullPointMode": "null",
+        "options": {
+          "alertThreshold": true
+        },
+        "percentage": false,
+        "pluginVersion": "9.2.2",
+        "pointradius": 5,
+        "points": false,
+        "renderer": "flot",
+        "seriesOverrides": [],
+        "spaceLength": 10,
+        "stack": false,
+        "steppedLine": false,
+        "targets": [
+          {
+            "datasource": {
+              "uid": "${DS_PROMETHEUS}"
+            },
+            "expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
+            "format": "time_series",
+            "intervalFactor": 2,
+            "legendFormat": "{{origin_entity}} ({{origin_type}})",
+            "refId": "A",
+            "step": 20
+          }
+        ],
+        "thresholds": [],
+        "timeRegions": [],
+        "title": "Events/s by Origin",
+        "tooltip": {
+          "shared": false,
+          "sort": 2,
+          "value_type": "individual"
+        },
+        "type": "graph",
+        "xaxis": {
+          "mode": "time",
+          "show": true,
+          "values": []
+        },
+        "yaxes": [
+          {
+            "format": "hertz",
+            "logBase": 1,
+            "min": "0",
+            "show": true
+          },
+          {
+            "format": "short",
+            "logBase": 1,
+            "show": true
+          }
+        ],
+        "yaxis": {
+          "align": false
+        }
+      },
       {
         "aliasColors": {},
         "bars": false,
@@ -4303,7 +4362,7 @@
           "exemplar": false,
           "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
           "instant": false,
-          "legendFormat": "{{origin_server_name}} ",
+          "legendFormat": "{{server_name}} ",
           "range": true,
           "refId": "A"
         }
@@ -4425,7 +4484,7 @@
           "exemplar": false,
           "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
           "instant": false,
-          "legendFormat": "{{destination_server_name}}",
+          "legendFormat": "{{server_name}}",
           "range": true,
           "refId": "A"
         }
@@ -20,10 +20,11 @@
 #
 
 import argparse
+import cgi
 import datetime
-import html
 import json
 import urllib.request
+from typing import List
 
 import pydot
 
@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
     return f"{pdu_id}@{origin}"
 
 
-def make_graph(pdus: list[dict], filename_prefix: str) -> None:
+def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     """
     Generate a dot and SVG file for a graph of events in the room based on the
     topological ordering by querying a homeserver.
@@ -44,10 +45,6 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     colors = {"red", "green", "blue", "yellow", "purple"}
 
     for pdu in pdus:
-        # TODO: The "origin" field has since been removed from events generated
-        # by Synapse. We should consider removing it here as well but since this
-        # is part of `contrib/`, it is left for the community to revise and ensure things
-        # still work correctly.
         origins.add(pdu.get("origin"))
 
     color_map = {color: color for color in colors if color in origins}
@@ -88,7 +85,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
             "name": name,
             "type": pdu.get("pdu_type"),
             "state_key": pdu.get("state_key"),
-            "content": html.escape(json.dumps(pdu.get("content")), quote=True),
+            "content": cgi.escape(json.dumps(pdu.get("content")), quote=True),
             "time": t,
             "depth": pdu.get("depth"),
         }
@@ -126,7 +123,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     graph.write_svg("%s.svg" % filename_prefix, prog="dot")
 
 
-def get_pdus(host: str, room: str) -> list[dict]:
+def get_pdus(host: str, room: str) -> List[dict]:
     transaction = json.loads(
         urllib.request.urlopen(
             f"http://{host}/_matrix/federation/v1/context/{room}/"
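The `html.escape` to `cgi.escape` change above is the same story in the other direction: `cgi.escape` was deprecated and then removed in Python 3.8 (the whole `cgi` module followed in 3.13), and `html.escape(s, quote=True)` is the drop-in stdlib replacement, which additionally escapes single quotes. For example:

    import html

    print(html.escape('<b>"x" & \'y\'</b>', quote=True))
    # &lt;b&gt;&quot;x&quot; &amp; &#x27;y&#x27;&lt;/b&gt;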
@@ -44,3 +44,31 @@ groups:
 ###
 ### End of 'Prometheus Console Only' rules block
 ###
+
+
+###
+### Grafana Only
+### The following rules are only needed if you use the Grafana dashboard
+### in contrib/grafana/synapse.json
+###
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
+    labels:
+      type: remote
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
+    labels:
+      type: local
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
+    labels:
+      type: bridges
+
+  - record: synapse_storage_events_persisted_by_event_type
+    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
+
+  - record: synapse_storage_events_persisted_by_origin
+    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
+###
+### End of 'Grafana Only' rules block
+###
debian/build_virtualenv (vendored)
@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
 python3 -m venv "$TEMP_VENV"
 source "$TEMP_VENV/bin/activate"
 pip install -U pip
-pip install poetry==2.1.1 poetry-plugin-export==1.9.0
+pip install poetry==1.3.2
 poetry export \
     --extras all \
     --extras test \
614
debian/changelog
vendored
614
debian/changelog
vendored
@@ -1,618 +1,8 @@
|
|||||||
matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
|
matrix-synapse-py3 (1.109.0+nmu1) UNRELEASED; urgency=medium
|
||||||
|
|
||||||
* New Synapse release 1.143.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 17:36:08 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.143.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 13:08:39 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 12:25:23 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Nov 2025 09:45:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.0rc4.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Nov 2025 10:54:42 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 17:39:11 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 16:21:30 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.142.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 13:20:15 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.141.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.141.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 29 Oct 2025 11:01:43 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.141.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Oct 2025 10:20:26 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.141.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Oct 2025 11:01:44 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.140.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.140.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Oct 2025 15:22:36 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.140.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Oct 2025 10:56:51 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:29:47 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 11:46:51 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.138.4) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.138.4.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:28:38 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.138.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.138.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 12:54:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Sep 2025 11:58:55 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:13:23 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.138.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* The licensing specifier has been updated to add an optional
|
|
||||||
`LicenseRef-Element-Commercial` license. The code was already licensed in
|
|
||||||
this manner - the debian metadata was just not updated to reflect it.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:17:17 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.138.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.138.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 24 Sep 2025 11:32:38 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 15:31:42 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.139.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 13:24:50 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.138.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Sep 2025 12:16:14 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.137.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.137.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Aug 2025 10:23:41 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.137.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Aug 2025 10:55:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.136.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.136.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Aug 2025 13:18:03 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.136.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 12:18:52 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.136.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Aug 2025 08:13:30 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.135.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.135.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:52:01 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.135.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.135.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:13:15 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.135.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.135.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 01 Aug 2025 13:12:28 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.135.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Jul 2025 12:19:14 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.135.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Jul 2025 12:08:37 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.134.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.134.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Jul 2025 14:22:50 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.134.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Jul 2025 11:27:13 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.133.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.133.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Jul 2025 13:13:24 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.133.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Jun 2025 11:57:47 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.132.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.132.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jun 2025 13:16:20 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.132.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jun 2025 11:15:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.131.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.131.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Jun 2025 14:36:55 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.131.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 28 May 2025 10:25:44 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.130.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.130.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 May 2025 08:34:13 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.130.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 May 2025 10:44:04 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.129.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.129.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 May 2025 12:22:11 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.129.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Apr 2025 13:13:16 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.129.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Apr 2025 10:47:43 -0600
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.128.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.128.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Apr 2025 14:09:54 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* Update Poetry to 2.1.1.
|
|
||||||
* New synapse release 1.128.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Apr 2025 14:35:33 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Mar 2025 12:04:15 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Mar 2025 13:30:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Mar 2025 13:11:29 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Mar 2025 15:45:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Mar 2025 14:29:12 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Mar 2025 13:11:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.125.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.125.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Feb 2025 08:10:07 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.125.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Feb 2025 13:32:49 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Feb 2025 11:55:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Feb 2025 13:42:55 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Feb 2025 16:35:53 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.123.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.123.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.123.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.122.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.122.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.122.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 18:24:48 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 13:12:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 04 Dec 2024 14:47:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 15:43:37 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 09:07:57 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2024 13:10:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.120.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 20 Nov 2024 15:02:21 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Nov 2024 13:57:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Nov 2024 14:33:02 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2024 08:59:43 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.118.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.118.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2024 15:29:53 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.118.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.118.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Oct 2024 11:48:14 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.117.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.117.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Oct 2024 10:46:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.117.0~rc1) stable; urgency=medium
|
|
-  * New Synapse release 1.117.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Oct 2024 14:37:11 +0100
-
-matrix-synapse-py3 (1.116.0) stable; urgency=medium
-
-  * New Synapse release 1.116.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Oct 2024 11:14:07 +0100
-
-matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.116.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 26 Sep 2024 13:28:43 +0000
-
-matrix-synapse-py3 (1.116.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.116.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 25 Sep 2024 09:34:07 +0000
-
-matrix-synapse-py3 (1.115.0) stable; urgency=medium
-
-  * New Synapse release 1.115.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Sep 2024 14:32:10 +0100
-
-matrix-synapse-py3 (1.115.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.115.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 12 Sep 2024 11:10:15 +0100
-
-matrix-synapse-py3 (1.115.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.115.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Sep 2024 08:39:09 -0600
-
-matrix-synapse-py3 (1.114.0) stable; urgency=medium
-
-  * New Synapse release 1.114.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 02 Sep 2024 15:14:53 +0100
-
-matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.114.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 16:38:05 +0100
-
-matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.114.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 15:35:13 +0100
-
-matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.114.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Aug 2024 12:55:28 +0000
-
-matrix-synapse-py3 (1.113.0) stable; urgency=medium
-
-  * New Synapse release 1.113.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Aug 2024 14:36:56 +0100
-
-matrix-synapse-py3 (1.113.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.113.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Aug 2024 12:23:23 +0100
-
-matrix-synapse-py3 (1.112.0) stable; urgency=medium
-
-  * New Synapse release 1.112.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Jul 2024 17:15:48 +0100
-
-matrix-synapse-py3 (1.112.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.112.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Jul 2024 08:58:55 -0600
-
-matrix-synapse-py3 (1.111.1) stable; urgency=medium
-
-  * New Synapse release 1.111.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Jul 2024 16:13:52 +0100
-
-matrix-synapse-py3 (1.111.0) stable; urgency=medium
-
-  * New Synapse release 1.111.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Jul 2024 12:42:46 +0200
-
-matrix-synapse-py3 (1.111.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.111.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 10 Jul 2024 08:46:54 +0000
-
-matrix-synapse-py3 (1.111.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.111.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Jul 2024 09:49:25 +0000
-
-matrix-synapse-py3 (1.110.0) stable; urgency=medium
-
-  * New Synapse release 1.110.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 03 Jul 2024 09:08:59 -0600
-
-matrix-synapse-py3 (1.110.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.110.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Jul 2024 08:28:56 -0600
-
-matrix-synapse-py3 (1.110.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.110.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Jun 2024 18:14:48 +0200
-
-matrix-synapse-py3 (1.110.0~rc1) stable; urgency=medium
-
   * `register_new_matrix_user` now supports a --password-file and a --exists-ok flag.
-
-  * New Synapse release 1.110.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Jun 2024 14:07:56 +0200
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jun 2024 13:29:36 +0100
 
 matrix-synapse-py3 (1.109.0) stable; urgency=medium
 
debian/copyright (vendored): 2 changes
@@ -8,7 +8,7 @@ License: Apache-2.0
 
 Files: *
 Copyright: 2023 New Vector Ltd
-License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
+License: AGPL-3.0-or-later
 
 Files: synapse/config/saml2.py
 Copyright: 2015, Ericsson
debian/hash_password.1 (vendored): 27 changes
@@ -1,13 +1,10 @@
-.\" generated with Ronn-NG/v0.10.1
-.\" http://github.com/apjanke/ronn-ng/tree/0.10.1
-.TH "HASH_PASSWORD" "1" "August 2024" ""
+.\" generated with Ronn-NG/v0.8.0
+.\" http://github.com/apjanke/ronn-ng/tree/0.8.0
+.TH "HASH_PASSWORD" "1" "July 2021" "" ""
 .SH "NAME"
 \fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
 .SH "SYNOPSIS"
-.TS
-allbox;
-\fBhash_password\fR [\fB\-p\fR \fB\-\-password\fR [password]] [\fB\-c\fR \fB\-\-config\fR \fIfile\fR]
-.TE
+\fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
 .SH "DESCRIPTION"
 \fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
 .P
@@ -23,7 +20,7 @@ bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
 .SH "OPTIONS"
 .TP
 \fB\-p\fR, \fB\-\-password\fR
-Read the password form the command line if [password] is supplied, or from \fBSTDIN\fR\. If not, prompt the user and read the password from the tty prompt\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
+Read the password form the command line if [password] is supplied\. If not, prompt the user and read the password form the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
 .TP
 \fB\-c\fR, \fB\-\-config\fR
 Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
@@ -36,17 +33,7 @@ $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
 .fi
 .IP "" 0
 .P
-Hash from the stdin:
-.IP "" 4
-.nf
-$ cat password_file | hash_password
-Password:
-Confirm password:
-$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
-.fi
-.IP "" 0
-.P
-Hash from the prompt:
+Hash from the STDIN:
 .IP "" 4
 .nf
 $ hash_password
@@ -66,6 +53,6 @@ $2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
 .fi
 .IP "" 0
 .SH "COPYRIGHT"
-This man page was written by Rahul De «rahulde@swecha\.net» for Debian GNU/Linux distribution\.
+This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
 .SH "SEE ALSO"
 synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
debian/hash_password.1.html (vendored): 182 changes (file removed)
@@ -1,182 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head>
-<meta http-equiv='content-type' content='text/html;charset=utf-8'>
-<meta name='generator' content='Ronn-NG/v0.10.1 (http://github.com/apjanke/ronn-ng/tree/0.10.1)'>
-<title>hash_password(1) - Calculate the hash of a new password, so that passwords can be reset</title>
-<style type='text/css' media='all'>
-/* style: man */
-body#manpage {margin:0}
-.mp {max-width:100ex;padding:0 9ex 1ex 4ex}
-.mp p,.mp pre,.mp ul,.mp ol,.mp dl {margin:0 0 20px 0}
-.mp h2 {margin:10px 0 0 0}
-.mp > p,.mp > pre,.mp > ul,.mp > ol,.mp > dl {margin-left:8ex}
-.mp h3 {margin:0 0 0 4ex}
-.mp dt {margin:0;clear:left}
-.mp dt.flush {float:left;width:8ex}
-.mp dd {margin:0 0 0 9ex}
-.mp h1,.mp h2,.mp h3,.mp h4 {clear:left}
-.mp pre {margin-bottom:20px}
-.mp pre+h2,.mp pre+h3 {margin-top:22px}
-.mp h2+pre,.mp h3+pre {margin-top:5px}
-.mp img {display:block;margin:auto}
-.mp h1.man-title {display:none}
-.mp,.mp code,.mp pre,.mp tt,.mp kbd,.mp samp,.mp h3,.mp h4 {font-family:monospace;font-size:14px;line-height:1.42857142857143}
-.mp h2 {font-size:16px;line-height:1.25}
-.mp h1 {font-size:20px;line-height:2}
-.mp {text-align:justify;background:#fff}
-.mp,.mp code,.mp pre,.mp pre code,.mp tt,.mp kbd,.mp samp {color:#131211}
-.mp h1,.mp h2,.mp h3,.mp h4 {color:#030201}
-.mp u {text-decoration:underline}
-.mp code,.mp strong,.mp b {font-weight:bold;color:#131211}
-.mp em,.mp var {font-style:italic;color:#232221;text-decoration:none}
-.mp a,.mp a:link,.mp a:hover,.mp a code,.mp a pre,.mp a tt,.mp a kbd,.mp a samp {color:#0000ff}
-.mp b.man-ref {font-weight:normal;color:#434241}
-.mp pre {padding:0 4ex}
-.mp pre code {font-weight:normal;color:#434241}
-.mp h2+pre,h3+pre {padding-left:0}
-ol.man-decor,ol.man-decor li {margin:3px 0 10px 0;padding:0;float:left;width:33%;list-style-type:none;text-transform:uppercase;color:#999;letter-spacing:1px}
-ol.man-decor {width:100%}
-ol.man-decor li.tl {text-align:left}
-ol.man-decor li.tc {text-align:center;letter-spacing:4px}
-ol.man-decor li.tr {text-align:right;float:right}
-</style>
-</head>
-<!--
-The following styles are deprecated and will be removed at some point:
-div#man, div#man ol.man, div#man ol.head, div#man ol.man.
-
-The .man-page, .man-decor, .man-head, .man-foot, .man-title, and
-.man-navigation should be used instead.
--->
-<body id='manpage'>
-<div class='mp' id='man'>
-
-<div class='man-navigation' style='display:none'>
-<a href="#NAME">NAME</a>
-<a href="#SYNOPSIS">SYNOPSIS</a>
-<a href="#DESCRIPTION">DESCRIPTION</a>
-<a href="#FILES">FILES</a>
-<a href="#OPTIONS">OPTIONS</a>
-<a href="#EXAMPLES">EXAMPLES</a>
-<a href="#COPYRIGHT">COPYRIGHT</a>
-<a href="#SEE-ALSO">SEE ALSO</a>
-</div>
-
-<ol class='man-decor man-head man head'>
-<li class='tl'>hash_password(1)</li>
-<li class='tc'></li>
-<li class='tr'>hash_password(1)</li>
-</ol>
-
-<h2 id="NAME">NAME</h2>
-<p class="man-name">
-<code>hash_password</code> - <span class="man-whatis">Calculate the hash of a new password, so that passwords can be reset</span>
-</p>
-<h2 id="SYNOPSIS">SYNOPSIS</h2>
-
-<table>
-<tbody>
-<tr>
-<td>
-<code>hash_password</code> [<code>-p</code>
-</td>
-<td>
-<code>--password</code> [password]] [<code>-c</code>
-</td>
-<td>
-<code>--config</code> <var>file</var>]</td>
-</tr>
-</tbody>
-</table>
-
-<h2 id="DESCRIPTION">DESCRIPTION</h2>
-
-<p><strong>hash_password</strong> calculates the hash of a supplied password using bcrypt.</p>
-
-<p><code>hash_password</code> takes a password as an parameter either on the command line
-or the <code>STDIN</code> if not supplied.</p>
-
-<p>It accepts an YAML file which can be used to specify parameters like the
-number of rounds for bcrypt and password_config section having the pepper
-value used for the hashing. By default <code>bcrypt_rounds</code> is set to <strong>12</strong>.</p>
-
-<p>The hashed password is written on the <code>STDOUT</code>.</p>
-
-<h2 id="FILES">FILES</h2>
-
-<p>A sample YAML file accepted by <code>hash_password</code> is described below:</p>
-
-<p>bcrypt_rounds: 17
-password_config:
-pepper: "random hashing pepper"</p>
-
-<h2 id="OPTIONS">OPTIONS</h2>
-
-<dl>
-<dt>
-<code>-p</code>, <code>--password</code>
-</dt>
-<dd>Read the password form the command line if [password] is supplied, or from <code>STDIN</code>.
-If not, prompt the user and read the password from the tty prompt.
-It is not recommended to type the password on the command line
-directly. Use the STDIN instead.</dd>
-<dt>
-<code>-c</code>, <code>--config</code>
-</dt>
-<dd>Read the supplied YAML <var>file</var> containing the options <code>bcrypt_rounds</code>
-and the <code>password_config</code> section containing the <code>pepper</code> value.</dd>
-</dl>
-
-<h2 id="EXAMPLES">EXAMPLES</h2>
-
-<p>Hash from the command line:</p>
-
-<pre><code>$ hash_password -p "p@ssw0rd"
-$2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
-</code></pre>
-
-<p>Hash from the stdin:</p>
-
-<pre><code>$ cat password_file | hash_password
-Password:
-Confirm password:
-$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
-</code></pre>
-
-<p>Hash from the prompt:</p>
-
-<pre><code>$ hash_password
-Password:
-Confirm password:
-$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
-</code></pre>
-
-<p>Using a config file:</p>
-
-<pre><code>$ hash_password -c config.yml
-Password:
-Confirm password:
-$2b$12$CwI.wBNr.w3kmiUlV3T5s.GT2wH7uebDCovDrCOh18dFedlANK99O
-</code></pre>
-
-<h2 id="COPYRIGHT">COPYRIGHT</h2>
-
-<p>This man page was written by Rahul De «rahulde@swecha.net»
-for Debian GNU/Linux distribution.</p>
-
-<h2 id="SEE-ALSO">SEE ALSO</h2>
-
-<p><span class="man-ref">synctl<span class="s">(1)</span></span>, <span class="man-ref">synapse_port_db<span class="s">(1)</span></span>, <span class="man-ref">register_new_matrix_user<span class="s">(1)</span></span>, <span class="man-ref">synapse_review_recent_signups<span class="s">(1)</span></span></p>
-
-<ol class='man-decor man-foot man foot'>
-<li class='tl'></li>
-<li class='tc'>August 2024</li>
-<li class='tr'>hash_password(1)</li>
-</ol>
-
-</div>
-</body>
-</html>
debian/hash_password.ronn (vendored): 13 changes
@@ -29,8 +29,8 @@ A sample YAML file accepted by `hash_password` is described below:
 ## OPTIONS
 
   * `-p`, `--password`:
-    Read the password form the command line if [password] is supplied, or from `STDIN`.
-    If not, prompt the user and read the password from the tty prompt.
+    Read the password form the command line if [password] is supplied.
+    If not, prompt the user and read the password form the `STDIN`.
     It is not recommended to type the password on the command line
     directly. Use the STDIN instead.
 
@@ -45,14 +45,7 @@ Hash from the command line:
     $ hash_password -p "p@ssw0rd"
     $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
 
-Hash from the stdin:
+Hash from the STDIN:
 
-    $ cat password_file | hash_password
-    Password:
-    Confirm password:
-    $2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
-
-Hash from the prompt:
-
     $ hash_password
     Password:
debian/templates (vendored): 2 changes
@@ -5,7 +5,7 @@ _Description: Name of the server:
  servers via federation. This is normally the public hostname of the
  server running synapse, but can be different if you set up delegation.
  Please refer to the delegation documentation in this case:
- https://element-hq.github.io/synapse/latest/delegate.html.
+ https://github.com/element-hq/synapse/blob/master/docs/delegate.md.
 
 Template: matrix-synapse/report-stats
 Type: boolean
@@ -138,13 +138,6 @@ for port in 8080 8081 8082; do
   per_user:
     per_second: 1000
     burst_count: 1000
-rc_presence:
-  per_user:
-    per_second: 1000
-    burst_count: 1000
-rc_delayed_event_mgmt:
-  per_second: 1000
-  burst_count: 1000
 RC
 )
     echo "${ratelimiting}" >> "$port.config"
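The hunk above is from a script that captures a block of YAML rate-limiting overrides in a shell variable via a heredoc (terminated by `RC`) and appends it to a per-port config file. A minimal, self-contained sketch of that pattern, assuming hypothetical file names that are not from this diff:

    #!/usr/bin/env bash
    # Sketch of the heredoc-append pattern used above; "demo-$port.config"
    # is an illustrative file name, not one from the repo.
    ratelimiting=$(cat <<RC
    rc_message:
      per_second: 1000
      burst_count: 1000
    RC
    )
    for port in 8080 8081 8082; do
      echo "${ratelimiting}" >> "demo-$port.config"
    done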
@@ -20,16 +20,45 @@
 # `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
 # in `poetry export` in the past.
 
-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
-ARG POETRY_VERSION=2.1.1
+ARG PYTHON_VERSION=3.11
 
 ###
 ### Stage 0: generate requirements.txt
 ###
-### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
-###
-FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements
 
+# RUN --mount is specific to buildkit and is documented at
+# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
+# Here we use it to set up a cache for apt (and below for pip), to improve
+# rebuild speeds on slow connections.
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update -qq && apt-get install -yqq \
+    build-essential curl git libffi-dev libssl-dev pkg-config \
+  && rm -rf /var/lib/apt/lists/*
+
+# Install rust and ensure its in the PATH.
+# (Rust may be needed to compile `cryptography`---which is one of poetry's
+# dependencies---on platforms that don't have a `cryptography` wheel.
+ENV RUSTUP_HOME=/rust
+ENV CARGO_HOME=/cargo
+ENV PATH=/cargo/bin:/rust/bin:$PATH
+RUN mkdir /rust /cargo
+
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
+
+# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
+# set to true, so we expose it as a build-arg.
+ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
+
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --user "poetry==1.3.2"
+
 WORKDIR /synapse
 
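As the hunk notes, arm64 builds can exhaust memory unless `CARGO_NET_GIT_FETCH_WITH_CLI` is enabled, which is why it is exposed as a build-arg rather than hardcoded. A hedged sketch of toggling it at build time (the image tag and platform are illustrative, not from this diff):

    # Sketch: enable the cargo network workaround for an arm64 image build.
    # "synapse:local-test" is a hypothetical tag.
    docker buildx build \
      --platform linux/arm64 \
      --build-arg CARGO_NET_GIT_FETCH_WITH_CLI=true \
      -f docker/Dockerfile \
      -t synapse:local-test .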
@@ -46,30 +75,41 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
 # Instead, we'll just install what a regular `pip install` would from PyPI.
 ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
 
-# This silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-ENV UV_LINK_MODE=copy
-
 # Export the dependencies, but only if we're actually going to use the Poetry lockfile.
 # Otherwise, just create an empty requirements file so that the Dockerfile can
 # proceed.
-ARG POETRY_VERSION
-RUN --mount=type=cache,target=/root/.cache/uv \
-  if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-    uvx --with poetry-plugin-export==1.9.0 \
-      poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
+RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+    /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
   else \
     touch /synapse/requirements.txt; \
   fi
 
 ###
 ### Stage 1: builder
 ###
-FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder
 
+# install the OS build deps
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update -qq && apt-get install -yqq \
+    build-essential \
+    libffi-dev \
+    libjpeg-dev \
+    libpq-dev \
+    libssl-dev \
+    libwebp-dev \
+    libxml++2.6-dev \
+    libxslt1-dev \
+    openssl \
+    zlib1g-dev \
+    git \
+    curl \
+    libicu-dev \
+    pkg-config \
+  && rm -rf /var/lib/apt/lists/*
+
-# This silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-ENV UV_LINK_MODE=copy
-
 # Install rust and ensure its in the PATH
 ENV RUSTUP_HOME=/rust
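Both sides of this hunk export the locked dependency set to a requirements file before installing; only the tool driving Poetry differs (uvx on the base branch, a user-installed Poetry on the head branch). A hedged sketch of the equivalent export run outside Docker, assuming a checkout with a Poetry lockfile; the plugin install step is only needed on Poetry versions where export is not bundled:

    # Sketch: export a Poetry lockfile to requirements.txt, as the hunk does.
    pip install --user poetry poetry-plugin-export
    poetry export --extras all -o requirements.txt
    # The builder stage can then install with hash checking:
    pip install --require-hashes -r requirements.txt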
@@ -79,6 +119,7 @@ RUN mkdir /rust /cargo
 
 RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
 
+
 # arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
 # set to true, so we expose it as a build-arg.
 ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
@@ -90,8 +131,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
 #
 # This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
 COPY --from=requirements /synapse/requirements.txt /synapse/
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
 
 # Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
@@ -105,86 +146,42 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
 # Install the synapse package itself.
 # If we have populated requirements.txt, we don't install any dependencies
 # as we should already have those from the previous `pip install` step.
-RUN \
-  --mount=type=cache,target=/root/.cache/uv \
-  --mount=type=cache,target=/synapse/target,sharing=locked \
+RUN --mount=type=cache,target=/synapse/target,sharing=locked \
   --mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
   if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-    uv pip install --prefix="/install" --no-deps /synapse[all]; \
+    pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
   else \
-    uv pip install --prefix="/install" /synapse[all]; \
+    pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
   fi
 
 ###
-### Stage 2: runtime dependencies download for ARM64 and AMD64
+### Stage 2: runtime
 ###
-FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
-
-# Tell apt to keep downloaded package files, as we're using cache mounts.
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-# Add both target architectures
-RUN dpkg --add-architecture arm64
-RUN dpkg --add-architecture amd64
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
 
-# Fetch the runtime dependencies debs for both architectures
-# We do that by building a recursive list of packages we need to download with `apt-cache depends`
-# and then downloading them with `apt-get download`.
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
+
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
-  apt-get update -qq && \
-  apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
-    curl \
-    gosu \
-    libjpeg62-turbo \
-    libpq5 \
-    libwebp7 \
-    xmlsec1 \
-    libjemalloc2 \
-    libicu72 \
-  | grep '^\w' > /tmp/pkg-list && \
-  for arch in arm64 amd64; do \
-    mkdir -p /tmp/debs-${arch} && \
-    chown _apt:root /tmp/debs-${arch} && \
-    cd /tmp/debs-${arch} && \
-    apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
-  done
-
-# Extract the debs for each architecture
-RUN \
-  for arch in arm64 amd64; do \
-    mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
-    for deb in /tmp/debs-${arch}/*.deb; do \
-      package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
-      echo "Extracting: ${package_name}"; \
-      dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
-      dpkg --extract $deb /install-${arch}; \
-    done; \
-  done
-
-###
-### Stage 3: runtime
-###
-
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
-
-ARG TARGETARCH
-
-LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
-LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
-LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
-
-COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
-COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
-COPY --from=runtime-deps /install-${TARGETARCH}/var /var
-
-# Copy the installed python packages from the builder stage.
-#
-# uv will generate a `.lock` file when installing packages, which we don't want
-# to copy to the final image.
-COPY --from=builder --exclude=.lock /install /usr/local
+  apt-get update -qq && apt-get install -yqq \
+    curl \
+    gosu \
+    libjpeg62-turbo \
+    libpq5 \
+    libwebp7 \
+    xmlsec1 \
+    libjemalloc2 \
+    libicu72 \
+    libssl-dev \
+    openssl \
+  && rm -rf /var/lib/apt/lists/*
+
+COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
 COPY ./docker/conf /conf
 
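The removed `runtime-deps` stage above resolves a package set once with `apt-cache depends`, downloads the .debs for each target architecture, and unpacks them without running apt in the final image. A hedged, standalone sketch of that download-and-extract pattern, to be run as root on a Debian system; the package and paths are illustrative:

    # Sketch: download a package for a foreign architecture and unpack it
    # without installing it.
    arch=arm64
    dpkg --add-architecture "$arch" && apt-get update
    mkdir -p "/tmp/debs-$arch" && cd "/tmp/debs-$arch"
    apt-get -o APT::Architecture="$arch" download libjemalloc2
    for deb in ./*.deb; do
      dpkg --extract "$deb" "/tmp/install-$arch"   # unpack files only
    done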
@@ -24,7 +24,7 @@ ARG distro=""
 # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
 # it's not obviously easier to use that than to build our own.)
 
-FROM docker.io/library/${distro} AS builder
+FROM docker.io/library/${distro} as builder
 
 RUN apt-get update -qq -o Acquire::Languages=none
 RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
@@ -73,8 +73,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
         curl \
         debhelper \
         devscripts \
-        # Required for building cffi from source.
-        libffi-dev \
         libsystemd-dev \
         lsb-release \
         pkg-config \
@@ -1,67 +1,51 @@
-# syntax=docker/dockerfile:1-labs
+# syntax=docker/dockerfile:1
 
 ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
-ARG REDIS_VERSION=7.2
 
-# first of all, we create a base image with dependencies which we can copy into the
+# first of all, we create a base image with an nginx which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing
 # each time.
 
-FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
+FROM docker.io/library/debian:bookworm-slim AS deps_base
 
-ARG DEBIAN_VERSION
-ARG REDIS_VERSION
-
-# Tell apt to keep downloaded package files, as we're using cache mounts.
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-# The upstream redis-server deb has fewer dynamic libraries than Debian's package which makes it easier to copy later on
-RUN \
-    curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
-    chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
-    echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
-
 RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && \
   DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-    nginx-light \
-    redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
-    # libicu is required by postgres, see `docker/complement/Dockerfile`
-    libicu76
-
-RUN \
-    # remove default page
-    rm /etc/nginx/sites-enabled/default && \
-    # have nginx log to stderr/out
-    ln -sf /dev/stdout /var/log/nginx/access.log && \
-    ln -sf /dev/stderr /var/log/nginx/error.log
-
-# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-RUN --mount=type=cache,target=/root/.cache/uv \
-    uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2
-
-RUN mkdir -p /uv/etc/supervisor/conf.d
+    redis-server nginx-light
+
+# Similarly, a base to copy the redis server from.
+#
+# The redis docker image has fewer dynamic libraries than the debian package,
+# which makes it much easier to copy (but we need to make sure we use an image
+# based on the same debian version as the synapse image, to make sure we get
+# the expected version of libc.
+FROM docker.io/library/redis:7-bookworm AS redis_base
 
 # now build the final image, based on the the regular Synapse docker image
 FROM $FROM
 
-# Copy over dependencies
-COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
-COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
-COPY --from=deps_base /uv /
+# Install supervisord with pip instead of apt, to avoid installing a second
+# copy of python.
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install supervisor~=4.2
+RUN mkdir -p /etc/supervisor/conf.d
+
+# Copy over redis and nginx
+COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
 
 COPY --from=deps_base /usr/sbin/nginx /usr/sbin
 COPY --from=deps_base /usr/share/nginx /usr/share/nginx
 COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
 COPY --from=deps_base /etc/nginx /etc/nginx
-COPY --from=deps_base /var/log/nginx /var/log/nginx
-# chown to allow non-root user to write to http-*-temp-path dirs
-COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
+RUN rm /etc/nginx/sites-enabled/default
+RUN mkdir /var/log/nginx /var/lib/nginx
+RUN chown www-data /var/lib/nginx
+
+# have nginx log to stderr/out
+RUN ln -sf /dev/stdout /var/log/nginx/access.log
+RUN ln -sf /dev/stderr /var/log/nginx/error.log
 
 # Copy Synapse worker, nginx and supervisord configuration template files
 COPY ./docker/conf-workers/* /conf/
|
|||||||
# Replace the healthcheck with one which checks *all* the workers. The script
|
# Replace the healthcheck with one which checks *all* the workers. The script
|
||||||
# is generated by configure_workers_and_start.py.
|
# is generated by configure_workers_and_start.py.
|
||||||
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
|
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
|
||||||
CMD ["/healthcheck.sh"]
|
CMD /bin/sh /healthcheck.sh
|
||||||
|
|||||||
@@ -114,9 +114,6 @@ The following environment variables are supported in `run` mode:
   is set via `docker run --user`, defaults to `991`, `991`. Note that this user
   must have permission to read the config files, and write to the data directories.
 * `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
-* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
-* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
-* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as `no_proxy` environment variable.
 
 For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
 
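For context on the removed README entries: on the base branch those variables let the container forward proxy settings to the Synapse process. A hedged example of supplying such environment variables when starting the container in `run` mode; the proxy address and volume path are illustrative:

    # Sketch: passing environment variables to the Synapse container.
    docker run -d --name synapse \
      -e TZ=Europe/Berlin \
      -e SYNAPSE_HTTP_PROXY=http://proxy.internal:3128 \
      -v /var/data/synapse:/data \
      matrixdotorg/synapse:latest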
@@ -11,9 +11,6 @@ DIST=$(cut -d ':' -f2 <<< "${distro:?}")
 cp -aT /synapse/source /synapse/build
 cd /synapse/build
 
-# Delete any existing `.so` files to ensure a clean build.
-rm -f /synapse/build/synapse/*.so
-
 # if this is a prerelease, set the Section accordingly.
 #
 # When the package is later added to the package repo, reprepro will use the
@@ -9,24 +9,21 @@
 ARG SYNAPSE_VERSION=latest
 # This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=trixie
-
-FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
 
 FROM $FROM
 # First of all, we copy postgres server from the official postgres image,
 # since for repeated rebuilds, this is much faster than apt installing
 # postgres each time.
 
-# This trick only works because we use a postgres image based on the same
-# debian version as Synapse's docker image (so the versions of the shared
-# libraries match). Any missing libraries need to be added to either the
-# Synapse image or docker/Dockerfile-workers.
+# This trick only works because (a) the Synapse image happens to have all the
+# shared libraries that postgres wants, (b) we use a postgres image based on
+# the same debian version as Synapse's docker image (so the versions of the
+# shared libraries match).
 RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
-COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
-COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
-COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
-ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
+COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
+COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
+RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
+ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
 ENV PGDATA=/var/lib/postgresql/data
 
 # We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
@@ -58,4 +55,4 @@ ENTRYPOINT ["/start_for_complement.sh"]
 
 # Update the healthcheck to have a shorter check interval
 HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
-    CMD ["/healthcheck.sh"]
+    CMD /bin/sh /healthcheck.sh
@@ -5,12 +5,12 @@
 set -e
 
 echo "Complement Synapse launcher"
-echo " Args: $*"
+echo " Args: $@"
 echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
 
 function log {
-    d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
-    echo "$d $*"
+    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
+    echo "$d $@"
 }
 
 # Set the server name of the homeserver
|
|||||||
export SYNAPSE_WORKER_TYPES="\
|
export SYNAPSE_WORKER_TYPES="\
|
||||||
event_persister:2, \
|
event_persister:2, \
|
||||||
background_worker, \
|
background_worker, \
|
||||||
|
frontend_proxy, \
|
||||||
event_creator, \
|
event_creator, \
|
||||||
user_dir, \
|
user_dir, \
|
||||||
media_repository, \
|
media_repository, \
|
||||||
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
|
|||||||
client_reader, \
|
client_reader, \
|
||||||
appservice, \
|
appservice, \
|
||||||
pusher, \
|
pusher, \
|
||||||
device_lists:2, \
|
|
||||||
stream_writers=account_data+presence+receipts+to_device+typing"
|
stream_writers=account_data+presence+receipts+to_device+typing"
|
||||||
|
|
||||||
fi
|
fi
|
||||||
@@ -103,11 +103,12 @@ fi
|
|||||||
# Note that both the key and certificate are in PEM format (not DER).
|
# Note that both the key and certificate are in PEM format (not DER).
|
||||||
|
|
||||||
# First generate a configuration file to set up a Subject Alternative Name.
|
# First generate a configuration file to set up a Subject Alternative Name.
|
||||||
echo "\
|
cat > /conf/server.tls.conf <<EOF
|
||||||
.include /etc/ssl/openssl.cnf
|
.include /etc/ssl/openssl.cnf
|
||||||
|
|
||||||
[SAN]
|
[SAN]
|
||||||
subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
|
subjectAltName=DNS:${SERVER_NAME}
|
||||||
|
EOF
|
||||||
|
|
||||||
# Generate an RSA key
|
# Generate an RSA key
|
||||||
openssl genrsa -out /conf/server.tls.key 2048
|
openssl genrsa -out /conf/server.tls.key 2048
|
||||||
@@ -122,12 +123,12 @@ openssl x509 -req -in /conf/server.tls.csr \
|
|||||||
-out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN
|
-out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN
|
||||||
|
|
||||||
# Assert that we have a Subject Alternative Name in the certificate.
|
# Assert that we have a Subject Alternative Name in the certificate.
|
||||||
# (the test will exit with 1 here if there isn't a SAN in the certificate.)
|
# (grep will exit with 1 here if there isn't a SAN in the certificate.)
|
||||||
[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
|
openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:
|
||||||
|
|
||||||
export SYNAPSE_TLS_CERT=/conf/server.tls.crt
|
export SYNAPSE_TLS_CERT=/conf/server.tls.crt
|
||||||
export SYNAPSE_TLS_KEY=/conf/server.tls.key
|
export SYNAPSE_TLS_KEY=/conf/server.tls.key
|
||||||
|
|
||||||
# Run the script that writes the necessary config files and starts supervisord, which in turn
|
# Run the script that writes the necessary config files and starts supervisord, which in turn
|
||||||
# starts everything else
|
# starts everything else
|
||||||
exec /configure_workers_and_start.py "$@"
|
exec /configure_workers_and_start.py
|
||||||
|
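The hunk above generates a self-signed certificate whose Subject Alternative Name is injected through a small openssl config file. A hedged end-to-end sketch of the same flow, runnable outside the container; the server name is illustrative and the `.include` directive needs OpenSSL 1.1.1 or later:

    #!/usr/bin/env bash
    set -e
    SERVER_NAME=my.test.server   # illustrative value
    cat > server.tls.conf <<EOF
    .include /etc/ssl/openssl.cnf

    [SAN]
    subjectAltName=DNS:${SERVER_NAME}
    EOF
    openssl genrsa -out server.tls.key 2048
    openssl req -new -key server.tls.key -out server.tls.csr \
      -subj "/CN=${SERVER_NAME}"
    openssl x509 -req -in server.tls.csr -signkey server.tls.key \
      -out server.tls.crt -extfile server.tls.conf -extensions SAN
    # Confirm the SAN made it into the certificate.
    openssl x509 -in server.tls.crt -noout -text | grep DNS: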
@@ -7,7 +7,6 @@
 #}
 
 ## Server ##
-public_baseurl: http://127.0.0.1:8008/
 report_stats: False
 trusted_key_servers: []
 enable_registration: true
@@ -85,22 +84,6 @@ rc_invites:
   per_user:
     per_second: 1000
     burst_count: 1000
-  per_issuer:
-    per_second: 1000
-    burst_count: 1000
-
-rc_presence:
-  per_user:
-    per_second: 9999
-    burst_count: 9999
-
-rc_delayed_event_mgmt:
-  per_second: 9999
-  burst_count: 9999
-
-rc_room_creation:
-  per_second: 9999
-  burst_count: 9999
 
 federation_rr_transactions_per_room_per_second: 9999
 
@@ -121,20 +104,6 @@ experimental_features:
   msc3967_enabled: true
   # Expose a room summary for public rooms
   msc3266_enabled: true
-  # Send to-device messages to application services
-  msc2409_to_device_messages_enabled: true
-  # Allow application services to masquerade devices
-  msc3202_device_masquerading: true
-  # Sending device list changes, one-time key counts and fallback key usage to application services
-  msc3202_transaction_extensions: true
-  # Proxy OTK claim requests to exclusive ASes
-  msc3983_appservice_otk_claims: true
-  # Proxy key queries to exclusive ASes
-  msc3984_appservice_key_query: true
-  # Invite filtering
-  msc4155_enabled: true
-  # Thread Subscriptions
-  msc4306_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
@@ -142,18 +111,10 @@ server_notices:
   system_mxid_avatar_url: ""
   room_name: "Server Alert"
 
-# Enable delayed events (msc4140)
-max_event_delay_duration: 24h
-
-
 # Disable sync cache so that initial `/sync` requests are up-to-date.
 caches:
   sync_response_cache_duration: 0
 
-
-# Complement assumes that it can publish to the room list by default.
-room_list_publication_rules:
-  - action: allow
-
 
 {% include "shared-orig.yaml.j2" %}
@@ -38,13 +38,10 @@ server {
 {% if using_unix_sockets %}
     proxy_pass http://unix:/run/main_public.sock;
 {% else %}
-    # note: do not add a path (even a single /) after the port in `proxy_pass`,
-    # otherwise nginx will canonicalise the URI and cause signature verification
-    # errors.
     proxy_pass http://localhost:8080;
 {% endif %}
     proxy_set_header X-Forwarded-For $remote_addr;
     proxy_set_header X-Forwarded-Proto $scheme;
-    proxy_set_header Host $host:$server_port;
+    proxy_set_header Host $host;
   }
 }
@@ -1,6 +1,5 @@
 {% if use_forking_launcher %}
 [program:synapse_fork]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/python -m synapse.app.complement_fork_starter
   {{ main_config_path }}
   synapse.app.homeserver
@@ -21,7 +20,6 @@ exitcodes=0
 
 {% else %}
 [program:synapse_main]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
   --config-path="{{ main_config_path }}"
   --config-path=/conf/workers/shared.yaml
@@ -38,7 +36,6 @@ exitcodes=0
 
 {% for worker in workers %}
 [program:synapse_{{ worker.name }}]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
   --config-path="{{ main_config_path }}"
   --config-path=/conf/workers/shared.yaml
@@ -77,13 +77,6 @@ loggers:
     #}
     synapse.visibility.filtered_event_debug:
         level: DEBUG
 
-    {#
-      If Synapse is under test, we don't care about seeing the "Applying schema" log
-      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
-    #}
-    synapse.storage.prepare_database:
-        level: WARN
 {% endif %}
 
 root:
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/bin/env python
|
||||||
#
|
#
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
#
|
#
|
||||||
@@ -65,9 +65,13 @@ from itertools import chain
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
|
Dict,
|
||||||
|
List,
|
||||||
Mapping,
|
Mapping,
|
||||||
MutableMapping,
|
MutableMapping,
|
||||||
NoReturn,
|
NoReturn,
|
||||||
|
Optional,
|
||||||
|
Set,
|
||||||
SupportsIndex,
|
SupportsIndex,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -92,7 +96,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
|
|||||||
# Watching /_matrix/media and related needs a "media" listener
|
# Watching /_matrix/media and related needs a "media" listener
|
||||||
# Stream Writers require "client" and "replication" listeners because they
|
# Stream Writers require "client" and "replication" listeners because they
|
||||||
# have to attach by instance_map to the master process and have client endpoints.
|
# have to attach by instance_map to the master process and have client endpoints.
|
||||||
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
|
+WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
@@ -113,7 +117,7 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
     },
     "media_repository": {
         "app": "synapse.app.generic_worker",
-        "listener_resources": ["media", "client"],
+        "listener_resources": ["media"],
         "endpoint_patterns": [
             "^/_matrix/media/",
             "^/_synapse/admin/v1/purge_media_cache$",
@@ -121,8 +125,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
             "^/_synapse/admin/v1/user/.*/media.*$",
             "^/_synapse/admin/v1/media/.*$",
             "^/_synapse/admin/v1/quarantine_media/.*$",
-            "^/_matrix/client/v1/media/.*$",
-            "^/_matrix/federation/v1/media/.*$",
         ],
         # The first configured media worker will run the media background jobs
         "shared_extra_conf": {
@@ -174,9 +176,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
-            "^/_matrix/client/(r0|v3)/delete_devices$",
             "^/_matrix/client/versions$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
             "^/_matrix/client/(r0|v3|unstable)/register$",
@@ -193,9 +192,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
             "^/_matrix/client/(r0|v3|unstable)/capabilities$",
             "^/_matrix/client/(r0|v3|unstable)/notifications$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",
@@ -204,7 +200,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["federation"],
         "endpoint_patterns": [
-            "^/_matrix/federation/v1/version$",
             "^/_matrix/federation/(v1|v2)/event/",
             "^/_matrix/federation/(v1|v2)/state/",
             "^/_matrix/federation/(v1|v2)/state_ids/",
@@ -267,6 +262,13 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
+    "frontend_proxy": {
+        "app": "synapse.app.generic_worker",
+        "listener_resources": ["client", "replication"],
+        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
+        "shared_extra_conf": {},
+        "worker_extra_conf": "",
+    },
     "account_data": {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["client", "replication"],
@@ -301,13 +303,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
-    "device_lists": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
     "typing": {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["client", "replication"],
@@ -324,15 +319,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
-    "thread_subscriptions": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [
-            "^/_matrix/client/unstable/io.element.msc4306/.*",
-        ],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
 }
 
 # Templates for sections that may be inserted multiple times in config files
@@ -363,11 +349,6 @@ def error(txt: str) -> NoReturn:
 
 
 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()
 
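The docstring dropped just above described real behaviour: when stdout is a pipe (as it is under supervisord) rather than a TTY, Python block-buffers `print()` output. A minimal illustration of what `flush_buffers()` guards against (illustrative only, not code from this diff):

```python
import sys

# With stdout attached to a pipe, Python block-buffers it: short print()
# calls can sit unseen until roughly 8KB accumulates or the process exits.
print("generating worker configs...")  # may be held in the buffer

# Flushing forces pending output out immediately, which is all
# flush_buffers() does:
sys.stdout.flush()
sys.stderr.flush()

# An equivalent per-call form:
print("generating worker configs...", flush=True)
```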
@@ -393,18 +374,16 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
     #
     # We use append mode in case the files have already been written to by something else
     # (for instance, as part of the instructions in a dockerfile).
-    exists = os.path.isfile(dst)
     with open(dst, "a") as outfile:
         # In case the existing file doesn't end with a newline
-        if exists:
-            outfile.write("\n")
+        outfile.write("\n")
 
         outfile.write(rendered)
 
 
 def add_worker_roles_to_shared_config(
     shared_config: dict,
-    worker_types_set: set[str],
+    worker_types_set: Set[str],
     worker_name: str,
     worker_port: int,
 ) -> None:
@@ -423,18 +402,16 @@ def add_worker_roles_to_shared_config(
     # streams
     instance_map = shared_config.setdefault("instance_map", {})
 
-    # This is a list of the stream_writers.
-    stream_writers = {
+    # This is a list of the stream_writers that there can be only one of. Events can be
+    # sharded, and therefore doesn't belong here.
+    singular_stream_writers = [
         "account_data",
-        "events",
-        "device_lists",
         "presence",
         "receipts",
         "to_device",
         "typing",
         "push_rules",
-        "thread_subscriptions",
-    }
+    ]
 
     # Worker-type specific sharding config. Now a single worker can fulfill multiple
     # roles, check each.
@@ -444,11 +421,28 @@ def add_worker_roles_to_shared_config(
     if "federation_sender" in worker_types_set:
         shared_config.setdefault("federation_sender_instances", []).append(worker_name)
 
+    if "event_persister" in worker_types_set:
+        # Event persisters write to the events stream, so we need to update
+        # the list of event stream writers
+        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
+            worker_name
+        )
+
+        # Map of stream writer instance names to host/ports combos
+        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
+            instance_map[worker_name] = {
+                "path": f"/run/worker.{worker_port}",
+            }
+        else:
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
     # Update the list of stream writers. It's convenient that the name of the worker
     # type is the same as the stream to write. Iterate over the whole list in case there
     # is more than one.
     for worker in worker_types_set:
-        if worker in stream_writers:
+        if worker in singular_stream_writers:
             shared_config.setdefault("stream_writers", {}).setdefault(
                 worker, []
             ).append(worker_name)
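To see what the branch added above produces, here is a sketch of the shared-config fragment built for a hypothetical worker named `event_persister1` on port 18009 (the name, port, and TCP transport are assumptions chosen for the example; the two mutating lines mirror the diff itself):

```python
shared_config: dict = {}
instance_map = shared_config.setdefault("instance_map", {})
worker_name, worker_port = "event_persister1", 18009  # hypothetical worker

# The event persister is registered as a writer on the "events" stream...
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
    worker_name
)
# ...and given an instance_map entry so other processes can reach it over TCP.
instance_map[worker_name] = {"host": "localhost", "port": worker_port}

assert shared_config == {
    "instance_map": {"event_persister1": {"host": "localhost", "port": 18009}},
    "stream_writers": {"events": ["event_persister1"]},
}
```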
@@ -467,9 +461,9 @@ def add_worker_roles_to_shared_config(
 
 
 def merge_worker_template_configs(
-    existing_dict: dict[str, Any] | None,
-    to_be_merged_dict: dict[str, Any],
-) -> dict[str, Any]:
+    existing_dict: Optional[Dict[str, Any]],
+    to_be_merged_dict: Dict[str, Any],
+) -> Dict[str, Any]:
     """When given an existing dict of worker template configuration consisting with both
     dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
     return new dict.
@@ -480,7 +474,7 @@ def merge_worker_template_configs(
             existing_dict.
     Returns: The newly merged together dict values.
     """
-    new_dict: dict[str, Any] = {}
+    new_dict: Dict[str, Any] = {}
     if not existing_dict:
         # It doesn't exist yet, just use the new dict(but take a copy not a reference)
         new_dict = to_be_merged_dict.copy()
@@ -505,8 +499,8 @@ def merge_worker_template_configs(
 
 
 def insert_worker_name_for_worker_config(
-    existing_dict: dict[str, Any], worker_name: str
-) -> dict[str, Any]:
+    existing_dict: Dict[str, Any], worker_name: str
+) -> Dict[str, Any]:
     """Insert a given worker name into the worker's configuration dict.
 
     Args:
@@ -522,7 +516,7 @@ def insert_worker_name_for_worker_config(
     return dict_to_edit
 
 
-def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
+def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
     """
     Apply multiplier(if found) by returning a new expanded list with some basic error
     checking.
@@ -583,7 +577,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
 
 def split_and_strip_string(
     given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> list[str]:
+) -> List[str]:
     """
     Helper to split a string on split_char and strip whitespace from each end of each
     element.
@@ -608,12 +602,12 @@ def generate_base_homeserver_config() -> None:
     # start.py already does this for us, so just call that.
     # note that this script is copied in in the official, monolith dockerfile
     os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
-    subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
+    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
 
 
 def parse_worker_types(
-    requested_worker_types: list[str],
-) -> dict[str, set[str]]:
+    requested_worker_types: List[str],
+) -> Dict[str, Set[str]]:
     """Read the desired list of requested workers and prepare the data for use in
     generating worker config files while also checking for potential gotchas.
 
@@ -629,14 +623,14 @@ def parse_worker_types(
     # A counter of worker_base_name -> int. Used for determining the name for a given
     # worker when generating its config file, as each worker's name is just
     # worker_base_name followed by instance number
-    worker_base_name_counter: dict[str, int] = defaultdict(int)
+    worker_base_name_counter: Dict[str, int] = defaultdict(int)
 
     # Similar to above, but more finely grained. This is used to determine we don't have
     # more than a single worker for cases where multiples would be bad(e.g. presence).
-    worker_type_shard_counter: dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
 
     # The final result of all this processing
-    dict_to_return: dict[str, set[str]] = {}
+    dict_to_return: Dict[str, Set[str]] = {}
 
     # Handle any multipliers requested for given workers.
     multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -680,7 +674,7 @@ def parse_worker_types(
 
         # Split the worker_type_string on "+", remove whitespace from ends then make
         # the list a set so it's deduplicated.
-        worker_types_set: set[str] = set(
+        worker_types_set: Set[str] = set(
             split_and_strip_string(worker_type_string, "+")
         )
 
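As a rough illustration of the parsing pipeline these hunks touch (the example worker-types value, the `:2` multiplier, and the step outputs are hypothetical, inferred from the comments above):

```python
def split_and_strip(given: str, sep: str) -> list[str]:
    # Same idea as split_and_strip_string above: split, then trim whitespace.
    return [part.strip() for part in given.split(sep)]

requested = "event_persister:2, account_data+presence"  # hypothetical value

# Step 1: split the requested value on "," into worker type strings.
worker_strings = split_and_strip(requested, ",")
# -> ["event_persister:2", "account_data+presence"]

# Step 2: apply_requested_multiplier_for_worker expands the ":2" multiplier.
# -> ["event_persister", "event_persister", "account_data+presence"]

# Step 3: each entry is split on "+" and deduplicated into a set of roles
# that one worker will fulfil.
roles = set(split_and_strip("account_data+presence", "+"))
assert roles == {"account_data", "presence"}
```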
@@ -739,7 +733,7 @@ def generate_worker_files(
     environ: Mapping[str, str],
     config_path: str,
     data_dir: str,
-    requested_worker_types: dict[str, set[str]],
+    requested_worker_types: Dict[str, Set[str]],
 ) -> None:
     """Read the desired workers(if any) that is passed in and generate shared
     homeserver, nginx and supervisord configs.
@@ -760,7 +754,7 @@ def generate_worker_files(
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners: list[Any]
+    listeners: List[Any]
     if using_unix_sockets:
         listeners = [
             {
@@ -788,12 +782,12 @@ def generate_worker_files(
     # base shared worker jinja2 template. This config file will be passed to all
     # workers, included Synapse's main process. It is intended mainly for disabling
     # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: dict[str, Any] = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}
 
     # List of dicts that describe workers.
     # We pass this to the Supervisor template later to generate the appropriate
     # program blocks.
-    worker_descriptors: list[dict[str, Any]] = []
+    worker_descriptors: List[Dict[str, Any]] = []
 
     # Upstreams for load-balancing purposes. This dict takes the form of the worker
     # type to the ports of each worker. For example:
@@ -801,14 +795,14 @@ def generate_worker_files(
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: dict[str, set[int]] = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}
 
     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
     # will be placed after the proxy_pass directive. The main benefit to representing
     # this data as a dict over a str is that we can easily deduplicate endpoints
     # across multiple instances of the same worker. The final rendering will be combined
     # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: dict[str, str] = {}
+    nginx_locations: Dict[str, str] = {}
 
     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
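For illustration, here are hypothetical contents of those two dicts after two instances of a client-reading worker have been processed, and the nginx directives they would roughly render to (the worker type, ports, and exact rendering are assumptions based on the comments above, not output from this diff):

```python
# Two instances of the same worker type share one upstream (ports are made up):
nginx_upstreams = {"client_reader": {18111, 18112}}

# Endpoint patterns map to that upstream; duplicates across instances collapse
# because this is a dict keyed on the endpoint.
nginx_locations = {
    "~* ^/_matrix/client/(api/v1|r0|v3|unstable)/login$": "http://client_reader",
}

# Rendered into nginx configuration along these lines:
#
#   upstream client_reader {
#       server localhost:18111;
#       server localhost:18112;
#   }
#   location ~* ^/_matrix/client/(api/v1|r0|v3|unstable)/login$ {
#       proxy_pass http://client_reader;
#   }
```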
@@ -842,7 +836,7 @@ def generate_worker_files(
     # yaml config file
     for worker_name, worker_types_set in requested_worker_types.items():
         # The collected and processed data will live here.
-        worker_config: dict[str, Any] = {}
+        worker_config: Dict[str, Any] = {}
 
         # Merge all worker config templates for this worker into a single config
         for worker_type in worker_types_set:
@@ -872,13 +866,6 @@ def generate_worker_files(
         else:
             healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
 
-        # Special case for event_persister: those are just workers that write to
-        # the `events` stream. For other workers, the worker name is the same
-        # name of the stream they write to, but for some reason it is not the
-        # case for event_persister.
-        if "event_persister" in worker_types_set:
-            worker_types_set.add("events")
-
         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(
             shared_config, worker_types_set, worker_name, worker_port
@@ -1009,7 +996,6 @@ def generate_worker_files(
         "/healthcheck.sh",
         healthcheck_urls=healthcheck_urls,
     )
-    os.chmod("/healthcheck.sh", 0o755)
 
     # Ensure the logging directory exists
     log_dir = data_dir + "/logs"
@@ -1025,7 +1011,7 @@ def generate_worker_log_config(
     Returns: the path to the generated file
     """
     # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: dict[str, str | None] = {}
+    extra_log_template_args: Dict[str, Optional[str]] = {}
     if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
         extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
 
@@ -1049,7 +1035,7 @@ def generate_worker_log_config(
     return log_config_filepath
 
 
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     parser = ArgumentParser()
     parser.add_argument(
         "--generate-only",
@@ -1083,7 +1069,7 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     if not worker_types_env:
         # No workers, just the main process
        worker_types = []
-        requested_worker_types: dict[str, Any] = {}
+        requested_worker_types: Dict[str, Any] = {}
     else:
         # Split type names by comma, ignoring whitespace.
         worker_types = split_and_strip_string(worker_types_env, ",")
@@ -1111,13 +1097,6 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     else:
         log("Could not find %s, will not use" % (jemallocpath,))
 
-    # Empty strings are falsy in Python so this default is fine. We just can't have these
-    # be undefined because supervisord will complain about our
-    # `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
-    environ.setdefault("SYNAPSE_HTTP_PROXY", "")
-    environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
-    environ.setdefault("SYNAPSE_NO_PROXY", "")
-
     # Start supervisord, which will start Synapse, all of the configured worker
     # processes, redis, nginx etc. according to the config we created above.
     log("Starting supervisord")

@@ -3,14 +3,14 @@
 #
 # Used by `complement.sh`. Not suitable for production use.
 
-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.9
 
 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian trixie here because this could change upstream
-# and other Dockerfiles used for testing are expecting trixie.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
 
 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps

@@ -10,9 +10,6 @@
 # '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
 # stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
 # confusion due to to interleaving of the different processes.
-prefixer() {
-    mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
-}
-exec 1> >(prefixer)
-exec 2> >(prefixer >&2)
+exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
+exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
 exec "$@"
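The `-W interactive` flag and `fflush()` in the removed `mawk` one-liner exist purely to defeat output buffering, so each worker's lines appear the moment they are written. The same prefix-and-flush behaviour expressed in Python, for comparison (a sketch, not part of the image):

```python
import os
import sys

# Prefix every incoming line with the supervisor process name and flush
# immediately, so output from concurrent workers interleaves line-by-line.
prefix = os.environ.get("SUPERVISOR_PROCESS_NAME", "unknown")
for line in sys.stdin:
    sys.stdout.write(f"{prefix} | {line}")
    sys.stdout.flush()
```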
@@ -6,7 +6,7 @@ import os
 import platform
 import subprocess
 import sys
-from typing import Any, Mapping, MutableMapping, NoReturn
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
 
 import jinja2
 
@@ -22,11 +22,6 @@ def error(txt: str) -> NoReturn:
 
 
 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()
 
@@ -50,7 +45,7 @@ def generate_config_from_template(
     config_dir: str,
     config_path: str,
     os_environ: Mapping[str, str],
-    ownership: str | None,
+    ownership: Optional[str],
 ) -> None:
     """Generate a homeserver.yaml from environment variables
 
@@ -69,7 +64,7 @@ def generate_config_from_template(
     )
 
     # populate some params from data files (if they exist, else create new ones)
-    environ: dict[str, Any] = dict(os_environ)
+    environ: Dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -147,7 +142,7 @@ def generate_config_from_template(
     subprocess.run(args, check=True)
 
 
-def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
     """Run synapse with a --generate-config param to generate a template config file
 
     Args:
@@ -200,7 +195,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
     subprocess.run(args, check=True)
 
 
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"
 
     # if we were given an explicit user to switch to, do so

@@ -63,18 +63,6 @@ mdbook serve
 
 The URL at which the docs can be viewed at will be logged.
 
-## Synapse configuration documentation
-
-The [Configuration
-Manual](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html)
-page is generated from a YAML file,
-[schema/synapse-config.schema.yaml](../schema/synapse-config.schema.yaml). To
-add new options or modify existing ones, first edit that file, then run
-[scripts-dev/gen_config_documentation.py](../scripts-dev/gen_config_documentation.py)
-to generate an updated Configuration Manual markdown file.
-
-Build the book as described above to preview it in a web browser.
-
 ## Configuration and theming
 
 The look and behaviour of the website is configured by the [book.toml](../book.toml) file

@@ -49,18 +49,14 @@
     - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
     - [Account data callbacks](modules/account_data_callbacks.md)
     - [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
-    - [Media repository callbacks](modules/media_repository_callbacks.md)
-    - [Ratelimit callbacks](modules/ratelimit_callbacks.md)
     - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
   - [Workers](workers.md)
     - [Using `synctl` with Workers](synctl_workers.md)
     - [Systemd](systemd-with-workers/README.md)
 - [Administration](usage/administration/README.md)
-  - [Backups](usage/administration/backups.md)
   - [Admin API](usage/administration/admin_api/README.md)
     - [Account Validity](admin_api/account_validity.md)
     - [Background Updates](usage/administration/admin_api/background_updates.md)
-    - [Fetch Event](admin_api/fetch_event.md)
     - [Event Reports](admin_api/event_reports.md)
     - [Experimental Features](admin_api/experimental_features.md)
     - [Media](admin_api/media_admin_api.md)
@@ -69,13 +65,11 @@
     - [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
     - [Manipulate Room Membership](admin_api/room_membership.md)
     - [Rooms](admin_api/rooms.md)
-    - [Scheduled tasks](admin_api/scheduled_tasks.md)
     - [Server Notices](admin_api/server_notices.md)
     - [Statistics](admin_api/statistics.md)
     - [Users](admin_api/user_admin_api.md)
     - [Server Version](admin_api/version_api.md)
     - [Federation](usage/administration/admin_api/federation.md)
-    - [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
   - [Manhole](manhole.md)
   - [Monitoring](metrics-howto.md)
   - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
@@ -116,8 +110,6 @@
   - [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
   - [Media Repository](media_repository.md)
   - [Room and User Statistics](room_and_user_statistics.md)
-  - [Releasing]()
-    - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
   - [Scripts]()
 
 # Other

@@ -1,67 +0,0 @@
-# Client-Server API Extensions
-
-Server administrators can set special account data to change how the Client-Server API behaves for
-their clients. Setting the account data, or having it already set, as a non-admin has no effect.
-
-All configuration options can be set through the `io.element.synapse.admin_client_config` global
-account data on the admin's user account.
-
-Example:
-```
-PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
-{
-  "return_soft_failed_events": true
-}
-```
-
-## See soft failed events
-
-Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
-
-To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
-to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
-exclude soft failed events.
-
-**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
-in the event's `unsigned` content like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `false`
-
-## See events marked spammy by policy servers
-
-Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
-
-Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
-flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
-to `true`.
-
-`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
-policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
-is always `true`.
-
-Events which were flagged by the policy will be flagged as `io.element.synapse.policy_server_spammy` in the
-event's `unsigned` content, like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
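The page removed above documents account data that is set with an ordinary Client-Server API call. A sketch of doing so with `requests` (the homeserver URL, user ID, and access token are placeholders; the endpoint and body come from the removed page's own example):

```python
import requests

homeserver = "https://matrix.example.com"  # placeholder
admin_user = "@admin:example.com"          # placeholder
access_token = "syt_..."                   # placeholder admin access token

# PUT the io.element.synapse.admin_client_config global account data,
# as shown in the removed page's example.
resp = requests.put(
    f"{homeserver}/_matrix/client/v3/user/{admin_user}/account_data/"
    "io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {access_token}"},
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```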
@@ -60,11 +60,10 @@ paginate through.
   anything other than the return value of `next_token` from a previous call. Defaults to `0`.
 * `dir`: string - Direction of event report order. Whether to fetch the most recent
   first (`b`) or the oldest first (`f`). Defaults to `b`.
-* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event
-  and wrote the reason.
-* `room_id`: optional string - Filter by room id.
-* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who
-  the report was made against.
+* `user_id`: string - Is optional and filters to only return users with user IDs that
+  contain this value. This is the user who reported the event and wrote the reason.
+* `room_id`: string - Is optional and filters to only return rooms with room IDs that
+  contain this value.
 
 **Response**
 
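For reference, a sketch of querying the event reports admin API with the filters described above (the server, token, and IDs are placeholders; the `/_synapse/admin/v1/event_reports` path and the `event_reports` response key are assumptions based on the Synapse admin API rather than text shown in this diff):

```python
import requests

homeserver = "https://matrix.example.com"  # placeholder
admin_token = "syt_..."                    # placeholder admin access token

# Most recent reports first (`dir=b`), filtered by reporter and room.
resp = requests.get(
    f"{homeserver}/_synapse/admin/v1/event_reports",
    headers={"Authorization": f"Bearer {admin_token}"},
    params={
        "dir": "b",
        "user_id": "@reporter:example.com",
        "room_id": "!room:example.com",
    },
)
resp.raise_for_status()
reports = resp.json()["event_reports"]
```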
@@ -117,6 +116,7 @@ It returns a JSON body like the following:
         "hashes": {
             "sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
         },
+        "origin": "matrix.org",
         "origin_server_ts": 1592291711430,
         "prev_events": [
             "$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
Some files were not shown because too many files have changed in this diff