mirror of https://github.com/element-hq/synapse.git (synced 2025-12-13 01:50:46 +00:00)

Compare commits: v1.139.0 ... dependabot (1 commit: 22906a1098)
@@ -1,10 +0,0 @@
-#!/bin/sh
-set -xeu
-
-# On 32-bit Linux platforms, we need libatomic1 to use rustup
-if command -v yum &> /dev/null; then
-    yum install -y libatomic
-fi
-
-# Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
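The removed helper bootstraps Rust non-interactively inside the wheel-build containers. A rough local equivalent, assuming rustup's standard installer (the toolchain pin is taken from the script above):

```sh
# Non-interactive rustup install: -y skips prompts, --profile minimal omits
# docs and extra components, and the toolchain version is pinned for
# reproducible builds.
curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal

# rustup drops binaries into ~/.cargo/bin; source its env file to use them.
. "$HOME/.cargo/env"
rustc --version   # should print rustc 1.82.0
```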
@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
 
 try:
     lock_version = lockfile["metadata"]["lock-version"]
-    assert lock_version == "2.1"
+    assert lock_version == "2.0"
 except Exception:
     print(
         """\
-Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
-and re-run `poetry lock`. See the Poetry cheat sheet at
+Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
+and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
 https://element-hq.github.io/synapse/develop/development/dependencies.html
 """
     )
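Both sides of this hunk gate CI on the `metadata.lock-version` field that Poetry writes into `poetry.lock` (roughly: Poetry 2.x emits lock-version 2.1, while older 1.x releases emit 2.0). A quick way to inspect it locally, assuming Python 3.11+ for the stdlib `tomllib` module:

```sh
# Print the lockfile format version; poetry.lock is TOML, so parse it
# properly rather than grepping.
python3 -c 'import tomllib; print(tomllib.load(open("poetry.lock", "rb"))["metadata"]["lock-version"])'
```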
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
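The technique on both sides is the same: dump only the schema of each database with site-specific details (tablespaces, ACLs, ownership) stripped so the two dumps are byte-comparable, then let `diff`'s exit code fail the job. The newer side additionally pins `--restrict-key`; recent pg_dump releases embed a randomised `\restrict` token in their output, which would otherwise make dumps of identical schemas differ. A minimal sketch of the pattern (database names are placeholders):

```sh
# Dump schema only, normalising away ownership/ACL noise; pin the restrict
# key so the \restrict line pg_dump emits is identical in both dumps
# (requires a pg_dump new enough to support --restrict-key).
pg_dump --schema-only --no-owner --no-acl --restrict-key=FIXED db_a > a.sql
pg_dump --schema-only --no-owner --no-acl --restrict-key=FIXED db_b > b.sql

# diff exits non-zero on any difference, which fails a `set -e` CI script.
diff -u a.sql b.sql
```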
.github/PULL_REQUEST_TEMPLATE.md (vendored): 3 changes
@@ -9,4 +9,5 @@
 - End with either a period (.) or an exclamation mark (!).
 - Start with a capital letter.
 - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
+  (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
.github/workflows/docker.yml (vendored): 146 changes
@@ -14,24 +14,26 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token
 jobs:
   build:
-    name: Build and push image for ${{ matrix.platform }}
-    runs-on: ${{ matrix.runs_on }}
-    strategy:
-      matrix:
-        include:
-          - platform: linux/amd64
-            runs_on: ubuntu-24.04
-            suffix: linux-amd64
-          - platform: linux/arm64
-            runs_on: ubuntu-24.04-arm
-            suffix: linux-arm64
+    runs-on: ubuntu-22.04
     steps:
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
 
+      - name: Inspect builder
+        run: docker buildx inspect
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.7.0
+
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
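The structural change in this hunk: the left side builds each platform natively on a matching runner (amd64 on ubuntu-24.04, arm64 on ubuntu-24.04-arm), while the right side builds both platforms in one job on an amd64 runner by registering QEMU user-mode emulation. A condensed sketch of the emulation approach (image name is a placeholder):

```sh
# Register binfmt handlers so the amd64 host can run arm64 binaries under
# QEMU; this is what docker/setup-qemu-action does inside the runner.
docker run --privileged --rm tonistiigi/binfmt --install arm64

# One buildx invocation then produces a multi-arch manifest, with the arm64
# half executing under emulation (slower, but needs only one runner).
docker buildx build --platform linux/amd64,linux/arm64 -t example.io/some/image --push .
```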
@@ -41,91 +43,25 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push by digest
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
-          file: "docker/Dockerfile"
-          platforms: ${{ matrix.platform }}
-          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
-
-      - name: Export digest
-        run: |
-          mkdir -p ${{ runner.temp }}/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-      - name: Upload digest
-        uses: actions/upload-artifact@v4
-        with:
-          name: digests-${{ matrix.suffix }}
-          path: ${{ runner.temp }}/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  merge:
-    name: Push merged images to ${{ matrix.repository }}
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        repository:
-          - docker.io/matrixdotorg/synapse
-          - ghcr.io/element-hq/synapse
-
-    needs:
-      - build
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - name: Log in to DockerHub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
-        if: ${{ startsWith(matrix.repository, 'docker.io') }}
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Log in to GHCR
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
-        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
-
       - name: Calculate docker image tag
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        id: set-tag
+        uses: docker/metadata-action@master
         with:
-          images: ${{ matrix.repository }}
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/element-hq/synapse
           flavor: |
             latest=false
           tags: |
@@ -133,23 +69,31 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
-            type=sha
 
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        env:
-          REPOSITORY: ${{ matrix.repository }}
-        run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$REPOSITORY@sha256:%s " *)
+      - name: Build and push all platforms
+        id: build-and-push
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: "${{ steps.set-tag.outputs.tags }}"
+          file: "docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64
 
-      - name: Sign each manifest
+          # arm64 builds OOM without the git fetch setting. c.f.
+          # https://github.com/rust-lang/cargo/issues/10583
+          build-args: |
+            CARGO_NET_GIT_FETCH_WITH_CLI=true
+
+      - name: Sign the images with GitHub OIDC Token
         env:
-          REPOSITORY: ${{ matrix.repository }}
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.set-tag.outputs.tags }}
         run: |
-          DIGESTS=""
-          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
-            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
-            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
           done
-          cosign sign --yes $DIGESTS
+          cosign sign --yes ${images}
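Both variants end by signing image digests with Cosign via the job's GitHub OIDC token (the `id-token: write` permission above) rather than a long-lived key. A sketch of the digest plumbing the newer side performs, with a placeholder repository:

```sh
REPO=example.io/some/image

# Resolve the digest behind a tag; signing by digest rather than by tag
# means the signature stays valid even if the tag is later repointed.
DIGEST=$(docker buildx imagetools inspect "$REPO:latest" \
    --format '{{json .Manifest}}' | jq -r '.digest')

# Keyless signing: cosign exchanges the ambient OIDC token for a short-lived
# certificate, so no private key is stored in CI.
cosign sign --yes "$REPO@$DIGEST"
```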
.github/workflows/docs-pr-netlify.yaml (vendored): 4 changes
@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
+        uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
           path: book
 
       - name: 📤 Deploy to Netlify
-        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
+        uses: matrix-org/netlify-pr-preview@v3
         with:
           path: book
           owner: ${{ github.event.workflow_run.head_repository.owner.login }}
.github/workflows/docs-pr.yaml (vendored): 8 changes
@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
 
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: book
           path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
.github/workflows/docs.yaml (vendored): 16 changes
@@ -50,7 +50,7 @@ jobs:
     needs:
      - pre
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
        run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
      - name: Setup python
-       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+       uses: actions/setup-python@v5
        with:
          python-version: "3.x"
 
@@ -78,18 +78,6 @@ jobs:
          mdbook build
          cp book/welcome_and_overview.html book/index.html
 
-     - name: Prepare and publish schema files
-       run: |
-         sudo apt-get update && sudo apt-get install -y yq
-         mkdir -p book/schema
-         # Remove developer notice before publishing.
-         rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
-         # Copy schema files that are independent from current Synapse version.
-         cp -r -t book/schema schema/v*/
-         # Convert config schema from YAML source file to JSON.
-         yq < schema/synapse-config.schema.yaml \
-            > book/schema/synapse-config.schema.json
-
      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
.github/workflows/fix_lint.yaml (vendored): 22 changes
@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:
 
-env:
-  # We use nightly so that `fmt` correctly groups together imports, and
-  # clippy correctly fixes up the benchmarks.
-  RUST_VERSION: nightly-2025-06-24
-
 jobs:
   fixup:
     name: Fix up
@@ -18,20 +13,21 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          toolchain: ${{ env.RUST_VERSION }}
-          components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+          # We use nightly so that `fmt` correctly groups together imports, and
+          # clippy correctly fixes up the benchmarks.
+          toolchain: nightly-2022-12-01
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           install-project: "false"
-          poetry-version: "2.1.1"
 
       - name: Run ruff check
         continue-on-error: true
@@ -47,6 +43,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1
+      - uses: stefanzweifel/git-auto-commit-action@v5
         with:
           commit_message: "Attempt to fix linting"
.github/workflows/latest_deps.yml (vendored): 45 changes
@@ -21,9 +21,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,25 +39,23 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --without dev
+      - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
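The `poetry update` flag swap tracks Poetry's CLI evolution: `--no-dev` was the 1.x spelling, deprecated in Poetry 1.2 and dropped in later releases, where dependency groups are excluded with `--without` instead. A sketch of the equivalent invocations (group name `dev` as used here):

```sh
# Poetry 1.x (deprecated, later removed): skip dev dependencies.
poetry update --no-dev

# Newer Poetry: exclude the "dev" dependency group instead.
poetry update --without dev
```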
@@ -77,13 +72,11 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -152,13 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -201,15 +192,15 @@ jobs:
           database: Postgres
 
     steps:
-      - name: Check out synapse codebase
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/poetry_lockfile.yaml (vendored): 4 changes
@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
       - run: pip install tomli
.github/workflows/push_complement_image.yml (vendored): 10 changes
@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@v5
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
.github/workflows/release-artifacts.yml (vendored): 72 changes
@@ -27,10 +27,10 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.x"
+          python-version: '3.x'
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -55,18 +55,18 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         with:
           path: src
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,9 +74,9 @@ jobs:
             ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.x"
+          python-version: '3.x'
 
       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -101,21 +101,18 @@ jobs:
           echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
 
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
           path: debs/*
 
   build-wheels:
-    name: Build wheels on ${{ matrix.os }}
+    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os:
-          - ubuntu-24.04
-          - ubuntu-24.04-arm
-          - macos-13 # This uses x86-64
-          - macos-14 # This uses arm64
+        os: [ubuntu-22.04, macos-13]
+        arch: [x86_64, aarch64]
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -125,27 +122,38 @@ jobs:
           # Don't build macos wheels on PR CI.
           - is_pr: true
             os: "macos-13"
-          - is_pr: true
-            os: "macos-14"
+          # Don't build aarch64 wheels on mac.
+          - os: "macos-13"
+            arch: aarch64
           # Don't build aarch64 wheels on PR CI.
           - is_pr: true
-            os: "ubuntu-24.04-arm"
+            arch: aarch64
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
           # setup-python@v4 doesn't impose a default python version. Need to use 3.x
           # here, because `python` on osx points to Python 2.7.
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.0.0
+        run: python -m pip install cibuildwheel==2.19.1
 
+      - name: Set up QEMU to emulate aarch64
+        if: matrix.arch == 'aarch64'
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
+      - name: Build aarch64 wheels
+        if: matrix.arch == 'aarch64'
+        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
+
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
 
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
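cibuildwheel is steered entirely through environment variables here, which is how the workflow routes the same build step to different architectures: `CIBW_BUILD` filters which wheel tags are built and `CIBW_ARCHS_LINUX` selects the (possibly emulated) target arch. A hedged local sketch of the same controls:

```sh
# Build only CPython 3.9 manylinux wheels, for aarch64, from the current
# checkout. On an x86_64 host this requires QEMU binfmt emulation to be
# registered first.
export CIBW_BUILD='cp39-manylinux_*'
export CIBW_ARCHS_LINUX=aarch64
python -m cibuildwheel --output-dir wheelhouse
```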
@@ -153,10 +161,13 @@ jobs:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
           CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
+          CARGO_NET_GIT_FETCH_WITH_CLI: true
+          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@v4
         with:
-          name: Wheel-${{ matrix.os }}
+          name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
           path: ./wheelhouse/*.whl
 
   build-sdist:
@@ -165,21 +176,22 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: '3.10'
 
       - run: pip install build
 
       - name: Build sdist
         run: python -m build --sdist
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@v4
         with:
           name: Sdist
           path: dist/*.tar.gz
 
+
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
@@ -191,7 +203,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        uses: actions/download-artifact@v4
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
         # that.
@@ -201,7 +213,7 @@ jobs:
           tar -cvJf debs.tar.xz debs
       - name: Attach to release
         # Pinned to work around https://github.com/softprops/action-gh-release/issues/445
-        uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
+        uses: softprops/action-gh-release@v0.1.15
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
.github/workflows/schema.yaml (vendored): 57 changes
@@ -1,57 +0,0 @@
-name: Schema
-
-on:
-  pull_request:
-    paths:
-      - schema/**
-      - docs/usage/configuration/config_documentation.md
-  push:
-    branches: ["develop", "release-*"]
-  workflow_dispatch:
-
-jobs:
-  validate-schema:
-    name: Ensure Synapse config schema is valid
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install check-jsonschema
-        run: pip install check-jsonschema==0.33.0
-
-      - name: Validate meta schema
-        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
-      - name: Validate schema
-        run: |-
-          # Please bump on introduction of a new meta schema.
-          LATEST_META_SCHEMA_VERSION=v1
-          check-jsonschema \
-            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
-            schema/synapse-config.schema.yaml
-      - name: Validate default config
-        # Populates the empty instance with default values and checks against the schema.
-        run: |-
-          echo "{}" | check-jsonschema \
-            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -
-
-  check-doc-generation:
-    name: Ensure generated documentation is up-to-date
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install PyYAML
-        run: pip install PyYAML==6.0.2
-
-      - name: Regenerate config documentation
-        run: |
-          scripts-dev/gen_config_documentation.py \
-            schema/synapse-config.schema.yaml \
-            > docs/usage/configuration/config_documentation.md
-      - name: Error in case of any differences
-        # Errors if there are now any modified files (untracked files are ignored).
-        run: 'git diff --exit-code'
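The deleted workflow validates in three layers: the meta-schema is checked against the JSON Schema spec, the config schema is checked against the meta-schema, and an empty config filled with defaults is checked against the config schema. The same checks can be run locally with the commands the workflow itself used (paths as in the repo):

```sh
pip install check-jsonschema==0.33.0

# 1. The meta-schema itself must be valid JSON Schema.
check-jsonschema --check-metaschema schema/v*/meta.schema.json

# 2. The config schema must conform to the latest meta-schema.
check-jsonschema --schemafile=schema/v1/meta.schema.json schema/synapse-config.schema.yaml

# 3. An empty config, populated with defaults, must satisfy the config schema.
echo "{}" | check-jsonschema --fill-defaults --schemafile=schema/synapse-config.schema.yaml -
```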
.github/workflows/tests.yml (vendored): 216 changes
@@ -11,9 +11,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
@@ -26,7 +23,7 @@ jobs:
       linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
       linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
     steps:
-      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+      - uses: dorny/paths-filter@v3
         id: filter
         # We only check on PRs
         if: startsWith(github.ref, 'refs/pull/')
@@ -86,16 +83,14 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/generate_sample_config.sh --check
       - run: poetry run scripts-dev/config-lint.sh
@@ -106,8 +101,8 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +111,8 @@ jobs:
   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -129,12 +124,11 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "2.1.1"
           install-project: "false"
 
       - name: Run ruff check
@@ -151,16 +145,14 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           # We want to make use of type hints in optional dependencies too.
           extras: all
@@ -169,12 +161,11 @@ jobs:
           # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
           # To make CI green, err towards caution and install the project.
           install-project: "true"
-          poetry-version: "2.1.1"
 
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@v4
         with:
           path: |
             .mypy_cache
@@ -187,7 +178,7 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh
 
@@ -195,11 +186,11 @@ jobs:
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -213,17 +204,15 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/check_pydantic_models.py
 
@@ -233,14 +222,13 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@1.66.0
         with:
           components: clippy
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy -- -D warnings
 
@@ -252,70 +240,32 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          toolchain: nightly-2025-04-23
+          toolchain: nightly-2022-12-01
           components: clippy
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy --all-features -- -D warnings
 
-  lint-rust:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust == 'true' }}
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          # Install like a normal project from source with all optional dependencies
-          extras: all
-          install-project: "true"
-          poetry-version: "2.1.1"
-
-      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
-        # The `::error::` syntax is using GitHub Actions' error annotations, see
-        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
-        run: |
-          if git diff --quiet Cargo.lock; then
-            echo "Cargo.lock is up to date"
-          else
-            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
-            git diff --exit-code Cargo.lock
-            exit 1
-          fi
-
-  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
-  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          # We use nightly so that we can use some unstable options that we use in
-          # `.rustfmt.toml`.
-          toolchain: nightly-2025-04-23
+          # We use nightly so that it correctly groups together imports
+          toolchain: nightly-2022-12-01
           components: rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo fmt --check
 
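Both sides pin an explicit nightly for `rustfmt` because the repo's `.rustfmt.toml` relies on unstable options (notably import grouping) that stable rustfmt rejects. A hedged local equivalent, using the nightly pinned on the newer side:

```sh
# Install the pinned nightly with only the rustfmt component, then run the
# same check CI runs; the +toolchain syntax selects it for one invocation.
rustup toolchain install nightly-2025-04-23 --profile minimal --component rustfmt
cargo +nightly-2025-04-23 fmt --check
```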
@@ -326,8 +276,8 @@ jobs:
     needs: changes
     if: ${{ needs.changes.outputs.linting_readme == 'true' }}
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install rstcheck"
@@ -347,12 +297,11 @@ jobs:
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
-      - lint-rust
       - lint-rustfmt
       - lint-readme
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v3
         with:
           needs: ${{ toJSON(needs) }}
 
@@ -366,7 +315,6 @@ jobs:
             lint-pydantic
             lint-clippy
             lint-clippy-nightly
-            lint-rust
             lint-rustfmt
             lint-readme
 
@@ -376,8 +324,8 @@ jobs:
|
|||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- id: get-matrix
|
- id: get-matrix
|
||||||
@@ -397,7 +345,7 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -412,15 +360,13 @@ jobs:
|
|||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.job.postgres-version }}
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.job.python-version }}
|
python-version: ${{ matrix.job.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.job.extras }}
|
extras: ${{ matrix.job.extras }}
|
||||||
- name: Await PostgreSQL
|
- name: Await PostgreSQL
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -453,13 +399,11 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
# There aren't wheels for some of the older deps, so we need to install
|
||||||
# their build dependencies
|
# their build dependencies
|
||||||
@@ -468,7 +412,7 @@ jobs:
|
|||||||
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||||
|
|
||||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: '3.9'
|
||||||
|
|
||||||
@@ -518,13 +462,13 @@ jobs:
|
|||||||
extras: ["all"]
|
extras: ["all"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.extras }}
|
extras: ${{ matrix.extras }}
|
||||||
- run: poetry run trial --jobs=2 tests
|
- run: poetry run trial --jobs=2 tests
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
@@ -568,15 +512,13 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Prepare test blacklist
|
- name: Prepare test blacklist
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
run: /bootstrap.sh synapse
|
run: /bootstrap.sh synapse
|
||||||
@@ -585,7 +527,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
||||||
@@ -615,11 +557,11 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
- run: .ci/scripts/test_export_data_command.sh
|
||||||
env:
|
env:
|
||||||
@@ -659,7 +601,7 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
- name: Add PostgreSQL apt repository
|
- name: Add PostgreSQL apt repository
|
||||||
# We need a version of pg_dump that can handle the version of
|
# We need a version of pg_dump that can handle the version of
|
||||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
# PostgreSQL being tested against. The Ubuntu package repository lags
|
||||||
@@ -670,10 +612,10 @@ jobs:
|
|||||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
- run: .ci/scripts/test_synapse_port_db.sh
|
||||||
id: run_tester_script
|
id: run_tester_script
|
||||||
@@ -683,7 +625,7 @@ jobs:
|
|||||||
PGPASSWORD: postgres
|
PGPASSWORD: postgres
|
||||||
PGDATABASE: postgres
|
PGDATABASE: postgres
|
||||||
- name: "Upload schema differences"
|
- name: "Upload schema differences"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
||||||
with:
|
with:
|
||||||
name: Schema dumps
|
name: Schema dumps
|
||||||
@@ -713,21 +655,19 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout synapse codebase
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -750,13 +690,11 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
- uses: Swatinem/rust-cache@v2
|
||||||
toolchain: ${{ env.RUST_VERSION }}
|
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
|
||||||
|
|
||||||
- run: cargo test
|
- run: cargo test
|
||||||
|
|
||||||
@@ -770,13 +708,13 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo bench --no-run
|
- run: cargo bench --no-run
|
||||||
|
|
||||||
@@ -795,7 +733,7 @@ jobs:
|
|||||||
- linting-done
|
- linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
- uses: matrix-org/done-action@v3
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
|
|||||||
2 .github/workflows/triage-incoming.yml vendored
@@ -6,7 +6,7 @@ on:

jobs:
  triage:
-    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
+    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
6 .github/workflows/triage_labelled.yml vendored
@@ -11,15 +11,11 @@ jobs:
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
    steps:
-      - uses: actions/add-to-project@4515659e2b458b27365e167605ac44f219494b66 # v1.0.2
+      - uses: actions/add-to-project@main
        id: add_project
        with:
          project-url: "https://github.com/orgs/matrix-org/projects/67"
          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        # This action will error if the issue already exists on the project. Which is
-        # common as `X-Needs-Info` will often be added to issues that are already in
-        # the triage queue. Prevent the whole job from failing in this case.
-        continue-on-error: true
      - name: Set status
        env:
          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
45 .github/workflows/twisted_trunk.yml vendored
@@ -20,9 +20,6 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

-env:
-  RUST_VERSION: 1.87.0
-
jobs:
  check_repo:
    # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is

@@ -43,19 +40,16 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all"
-          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}

@@ -70,20 +64,17 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all test"
-          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk

@@ -117,13 +108,11 @@ jobs:
      - ${{ github.workspace }}:/src

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2

      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,

@@ -147,7 +136,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})

@@ -175,14 +164,14 @@ jobs:

    steps:
      - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

@@ -192,11 +181,11 @@ jobs:
        run: |
          set -x
          DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
-          pipx install poetry==2.1.1
+          pipx install poetry==1.3.2

          poetry remove -n twisted
          poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
-          poetry lock
+          poetry lock --no-update
        working-directory: synapse

      - run: |

@@ -217,7 +206,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
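One detail worth calling out in the hunk above: the switch between `poetry lock` and `poetry lock --no-update` tracks a behaviour change between Poetry major versions. Under Poetry 1.x a bare `poetry lock` would also upgrade dependencies, so `--no-update` was needed to merely refresh the lockfile; under 2.x refreshing without upgrading is the default and the flag was removed. A sketch of the two equivalent invocations, assuming the versions pinned in this workflow:

    pipx install poetry==1.3.2 && poetry lock --no-update   # Poetry 1.x: keep pinned versions
    pipx install poetry==2.1.1 && poetry lock               # Poetry 2.x: same behaviour by default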
1 .gitignore vendored
@@ -47,7 +47,6 @@ __pycache__/
/.idea/
/.ropeproject/
/.vscode/
-/.zed/

# build products
!/.coveragerc
@@ -1,6 +1 @@
-# Unstable options are only available on a nightly toolchain and must be opted into
-unstable_features = true
-
-# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
-# https://github.com/rust-lang/rustfmt/issues/5083
group_imports = "StdExternalCrate"
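Because `unstable_features` and `group_imports` are only honoured by a nightly rustfmt (stable builds warn and skip them), the matching format check has to run on a nightly toolchain, which is why the `lint-rustfmt` job above pins one. A minimal local sketch, assuming rustup manages the toolchains:

    # Install a nightly with rustfmt and run the same check as CI
    rustup toolchain install nightly --component rustfmt
    cargo +nightly fmt --check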
1061 CHANGES.md
File diff suppressed because it is too large

1569 Cargo.lock generated
File diff suppressed because it is too large

@@ -1,6 +0,0 @@
-Licensees holding a valid commercial license with Element may use this
-software in accordance with the terms contained in a written agreement
-between you and Element.
-
-To purchase a commercial license please contact our sales team at
-licensing@element.io
56 README.rst
@@ -8,28 +8,27 @@
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
-secure and interoperable real-time communications. You can directly run
+secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
-license (or alternatively under a commercial license from Element).
-There is no support provided by Element unless you have a
-subscription from Element.
+license. There is no support provided from Element unless you have a
+subscription.

-Subscription
-============
+Subscription alternative
+========================

-For those that need an enterprise-ready solution, Element
-Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
+Alternatively, for those that need an enterprise-ready solution, Element
+Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
-moderation and data retention options as well as Long-Term Support and
-SLAs. ESS supports any Matrix-compatible client.
+moderation and data retention options as well as Long Term Support and
+SLAs. ESS can be used to support any Matrix-based frontend client.

.. contents::

-🛠️ Installation and configuration
-==================================
+🛠️ Installing and configuration
+===============================

The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org

@@ -133,7 +132,7 @@ connect from a client: see
An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
-and instead specify a homeserver URL of ``https://<server_name>:8448``
+and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.

@@ -162,8 +161,9 @@ the public internet. Without it, anyone can freely register accounts on your hom
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.

-Your new Matrix ID will be formed partly from the ``server_name``, and partly
-from a localpart you specify when you create the account in the form of::
+Your new user name will be formed partly from the ``server_name``, and partly
+from a localpart you specify when you create the account. Your name will take
+the form of::

   @localpart:my.domain.name

@@ -208,10 +208,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.

-**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
-Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**
+**They are not where accounts or credentials are stored - these live on home
+servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**

-This process is highly security-sensitive, as there is an obvious risk of spam if it
+This process is very security-sensitive, as there is obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,

@@ -237,9 +237,9 @@ email address.
We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
-This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
-information for Synapse developers as well as Synapse administrators.
+This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
+
+information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:

* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,

@@ -249,22 +249,6 @@ Developers might be particularly interested in:
Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!

-Copyright and Licensing
-=======================
-
-| Copyright 2014-2017 OpenMarket Ltd
-| Copyright 2017 Vector Creations Ltd
-| Copyright 2017-2025 New Vector Ltd
-|
-
-This software is dual-licensed by New Vector Ltd (Element). It can be used either:
-
-(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
-
-(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
-
-Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
-

.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
  :alt: (get community support in #synapse:matrix.org)
@@ -19,17 +19,17 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
        # This flag is a no-op in the latest versions. Instead, we need to
        # specify this in the `bdist_wheel` config below.
        py_limited_api=True,
-        # We always build in release mode, as we can't distinguish
-        # between using `poetry` in development vs production.
+        # We force always building in release mode, as we can't tell the
+        # difference between using `poetry` in development vs production.
        debug=False,
    )
    setup_kwargs.setdefault("rust_extensions", []).append(extension)
    setup_kwargs["zip_safe"] = False

-    # We look up the minimum supported Python version with
-    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
+    # We lookup the minimum supported python version by looking at
+    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
    # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp39 for Python 3.9.
+    # e.g. cp39 for python 3.9.
    py_limited_api: str
    python_bounds = SpecifierSet(setup_kwargs["python_requires"])
    for minor_version in itertools.count(start=8):
1 changelog.d/17732.bugfix Normal file
@@ -0,0 +1 @@
+Fix membership caches not updating in state reset scenarios.

1 changelog.d/18035.feature Normal file
@@ -0,0 +1 @@
+Add a unit test for the `type` parameter of the [Room State Admin API](https://element-hq.github.io/synapse/develop/admin_api/rooms.html#room-state-api).

1 changelog.d/18052.removal Normal file
@@ -0,0 +1 @@
+Remove the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) implementation. The stable support remains, per [Matrix 1.13](https://spec.matrix.org/v1.13/client-server-api/#post_matrixclientv3roomsroomidreport).

1 changelog.d/18072.misc Normal file
@@ -0,0 +1 @@
+Increase invite rate limits (`rc_invites.per_issuer`) for Complement.
@@ -51,7 +51,7 @@ services:
      - traefik.http.routers.https-synapse.tls.certResolver=le-ssl

  db:
-    image: docker.io/postgres:15-alpine
+    image: docker.io/postgres:12-alpine
    # Change that password, of course!
    environment:
      - POSTGRES_USER=synapse
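A caveat for anyone applying this compose change: PostgreSQL's on-disk data format is not compatible across major versions, so swapping the image tag over an existing volume will not work by itself. A rough dump-and-restore sketch, assuming the `db` service and `synapse` user from this file:

    docker compose exec db pg_dumpall -U synapse > backup.sql   # dump under the old major
    docker compose stop db        # then switch the image tag and start from a fresh volume
    docker compose up -d db
    docker compose exec -T db psql -U synapse postgres < backup.sql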
@@ -220,24 +220,29 @@
        "yBucketBound": "auto"
      },
      {
-        "datasource": {
-          "uid": "${DS_PROMETHEUS}",
-          "type": "prometheus"
-        },
        "aliasColors": {},
+        "bars": false,
        "dashLength": 10,
+        "dashes": false,
+        "datasource": {
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "description": "",
        "fieldConfig": {
          "defaults": {
            "links": []
          },
          "overrides": []
        },
+        "fill": 0,
+        "fillGradient": 0,
        "gridPos": {
          "h": 9,
          "w": 12,
          "x": 12,
          "y": 1
        },
+        "hiddenSeries": false,
        "id": 152,
        "legend": {
          "avg": false,

@@ -250,81 +255,71 @@
          "values": false
        },
        "lines": true,
+        "linewidth": 0,
+        "links": [],
        "nullPointMode": "connected",
        "options": {
          "alertThreshold": true
        },
        "paceLength": 10,
-        "pluginVersion": "10.4.3",
+        "percentage": false,
+        "pluginVersion": "9.2.2",
        "pointradius": 5,
+        "points": false,
        "renderer": "flot",
        "seriesOverrides": [
          {
            "alias": "Avg",
            "fill": 0,
-            "linewidth": 3,
-            "$$hashKey": "object:48"
+            "linewidth": 3
          },
          {
            "alias": "99%",
            "color": "#C4162A",
-            "fillBelowTo": "90%",
-            "$$hashKey": "object:49"
+            "fillBelowTo": "90%"
          },
          {
            "alias": "90%",
            "color": "#FF7383",
-            "fillBelowTo": "75%",
-            "$$hashKey": "object:50"
+            "fillBelowTo": "75%"
          },
          {
            "alias": "75%",
            "color": "#FFEE52",
-            "fillBelowTo": "50%",
-            "$$hashKey": "object:51"
+            "fillBelowTo": "50%"
          },
          {
            "alias": "50%",
            "color": "#73BF69",
-            "fillBelowTo": "25%",
-            "$$hashKey": "object:52"
+            "fillBelowTo": "25%"
          },
          {
            "alias": "25%",
            "color": "#1F60C4",
-            "fillBelowTo": "5%",
-            "$$hashKey": "object:53"
+            "fillBelowTo": "5%"
          },
          {
            "alias": "5%",
-            "lines": false,
-            "$$hashKey": "object:54"
+            "lines": false
          },
          {
            "alias": "Average",
            "color": "rgb(255, 255, 255)",
            "lines": true,
-            "linewidth": 3,
-            "$$hashKey": "object:55"
+            "linewidth": 3
          },
          {
-            "alias": "Local events being persisted",
-            "color": "#96d98D",
-            "points": true,
-            "yaxis": 2,
-            "zindex": -3,
-            "$$hashKey": "object:56"
-          },
-          {
-            "$$hashKey": "object:329",
+            "alias": "Events",
            "color": "#B877D9",
-            "alias": "All events being persisted",
+            "hideTooltip": true,
            "points": true,
            "yaxis": 2,
            "zindex": -3
          }
        ],
        "spaceLength": 10,
+        "stack": false,
+        "steppedLine": false,
        "targets": [
          {
            "datasource": {

@@ -389,20 +384,7 @@
            },
            "expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
            "legendFormat": "Average",
-            "refId": "H",
-            "editorMode": "code",
-            "range": true
-          },
-          {
-            "datasource": {
-              "uid": "${DS_PROMETHEUS}"
-            },
-            "expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
-            "hide": false,
-            "instant": false,
-            "legendFormat": "Local events being persisted",
-            "refId": "E",
-            "editorMode": "code"
+            "refId": "H"
          },
          {
            "datasource": {

@@ -411,9 +393,8 @@
            "expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
            "hide": false,
            "instant": false,
-            "legendFormat": "All events being persisted",
-            "refId": "I",
-            "editorMode": "code"
+            "legendFormat": "Events",
+            "refId": "E"
          }
        ],
        "thresholds": [

@@ -447,9 +428,7 @@
        "xaxis": {
          "mode": "time",
          "show": true,
-          "values": [],
-          "name": null,
-          "buckets": null
+          "values": []
        },
        "yaxes": [
          {

@@ -471,20 +450,7 @@
        ],
        "yaxis": {
          "align": false
-        },
-        "bars": false,
-        "dashes": false,
-        "description": "",
-        "fill": 0,
-        "fillGradient": 0,
-        "hiddenSeries": false,
-        "linewidth": 0,
-        "percentage": false,
-        "points": false,
-        "stack": false,
-        "steppedLine": false,
-        "timeFrom": null,
-        "timeShift": null
+        }
      },
      {
        "aliasColors": {},

@@ -4396,7 +4362,7 @@
            "exemplar": false,
            "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
            "instant": false,
-            "legendFormat": "{{origin_server_name}} ",
+            "legendFormat": "{{server_name}} ",
            "range": true,
            "refId": "A"
          }

@@ -4518,7 +4484,7 @@
            "exemplar": false,
            "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
            "instant": false,
-            "legendFormat": "{{destination_server_name}}",
+            "legendFormat": "{{server_name}}",
            "range": true,
            "refId": "A"
          }
@@ -45,10 +45,6 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
    colors = {"red", "green", "blue", "yellow", "purple"}

    for pdu in pdus:
-        # TODO: The "origin" field has since been removed from events generated
-        # by Synapse. We should consider removing it here as well but since this
-        # is part of `contrib/`, it is left for the community to revise and ensure things
-        # still work correctly.
        origins.add(pdu.get("origin"))

    color_map = {color: color for color in colors if color in origins}
2 debian/build_virtualenv vendored
@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
-pip install poetry==2.1.1 poetry-plugin-export==1.9.0
+pip install poetry==1.3.2
poetry export \
    --extras all \
    --extras test \
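The extra `poetry-plugin-export` pin on the removed side reflects another difference between the Poetry majors: 1.x shipped `poetry export` in core, while in 2.x it lives in a separate plugin that must be installed alongside Poetry itself. A sketch of the 2.x equivalent of this script's export step, under that assumption (the output filename here is illustrative):

    pip install poetry==2.1.1 poetry-plugin-export==1.9.0
    poetry export --extras all --extras test -o exported_requirements.txt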
277 debian/changelog vendored
@@ -1,280 +1,3 @@
-matrix-synapse-py3 (1.139.0) stable; urgency=medium
-
-  * New Synapse release 1.139.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
-
-matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
-
-matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
-
-matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
-
-matrix-synapse-py3 (1.138.0) stable; urgency=medium
-
-  * New Synapse release 1.138.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Sep 2025 11:21:25 +0100
-
-matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.138.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Sep 2025 12:16:14 +0000
-
-matrix-synapse-py3 (1.137.0) stable; urgency=medium
-
-  * New Synapse release 1.137.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Aug 2025 10:23:41 +0100
-
-matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.137.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Aug 2025 10:55:22 +0100
-
-matrix-synapse-py3 (1.136.0) stable; urgency=medium
-
-  * New Synapse release 1.136.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Aug 2025 13:18:03 +0100
-
-matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 12:18:52 -0600
-
-matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Aug 2025 08:13:30 -0600
-
-matrix-synapse-py3 (1.135.2) stable; urgency=medium
-
-  * New Synapse release 1.135.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:52:01 -0600
-
-matrix-synapse-py3 (1.135.1) stable; urgency=medium
-
-  * New Synapse release 1.135.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:13:15 -0600
-
-matrix-synapse-py3 (1.135.0) stable; urgency=medium
-
-  * New Synapse release 1.135.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Aug 2025 13:12:28 +0100
-
-matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.135.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jul 2025 12:19:14 +0100
-
-matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.135.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Jul 2025 12:08:37 +0100
-
-matrix-synapse-py3 (1.134.0) stable; urgency=medium
-
-  * New Synapse release 1.134.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jul 2025 14:22:50 +0100
-
-matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.134.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Jul 2025 11:27:13 +0100
-
-matrix-synapse-py3 (1.133.0) stable; urgency=medium
-
-  * New synapse release 1.133.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jul 2025 13:13:24 +0000
-
-matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.133.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Jun 2025 11:57:47 +0100
-
-matrix-synapse-py3 (1.132.0) stable; urgency=medium
-
-  * New Synapse release 1.132.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Jun 2025 13:16:20 +0100
-
-matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.132.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Jun 2025 11:15:18 +0100
-
-matrix-synapse-py3 (1.131.0) stable; urgency=medium
-
-  * New Synapse release 1.131.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Jun 2025 14:36:55 +0100
-
-matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.131.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 28 May 2025 10:25:44 +0000
-
-matrix-synapse-py3 (1.130.0) stable; urgency=medium
-
-  * New Synapse release 1.130.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 May 2025 08:34:13 -0600
-
-matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.130.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 May 2025 10:44:04 +0100
-
-matrix-synapse-py3 (1.129.0) stable; urgency=medium
-
-  * New Synapse release 1.129.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 May 2025 12:22:11 +0100
-
-matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.129.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Apr 2025 13:13:16 +0000
-
-matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.129.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Apr 2025 10:47:43 -0600
-
-matrix-synapse-py3 (1.128.0) stable; urgency=medium
-
-  * New Synapse release 1.128.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Apr 2025 14:09:54 +0100
-
-matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
-
-  * Update Poetry to 2.1.1.
-  * New synapse release 1.128.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Apr 2025 14:35:33 +0000
-
-matrix-synapse-py3 (1.127.1) stable; urgency=medium
-
-  * New Synapse release 1.127.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Mar 2025 21:07:31 +0000
-
-matrix-synapse-py3 (1.127.0) stable; urgency=medium
-
-  * New Synapse release 1.127.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Mar 2025 12:04:15 +0000
-
-matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.127.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Mar 2025 13:30:05 +0000
-
-matrix-synapse-py3 (1.126.0) stable; urgency=medium
-
-  * New Synapse release 1.126.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Mar 2025 13:11:29 +0000
-
-matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Mar 2025 15:45:05 +0000
-
-matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Mar 2025 14:29:12 +0000
-
-matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.126.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Mar 2025 13:11:51 +0000
-
-matrix-synapse-py3 (1.125.0) stable; urgency=medium
-
-  * New Synapse release 1.125.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Feb 2025 08:10:07 -0700
-
-matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.125.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Feb 2025 13:32:49 +0000
-
-matrix-synapse-py3 (1.124.0) stable; urgency=medium
-
-  * New Synapse release 1.124.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Feb 2025 11:55:22 +0100
-
-matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Feb 2025 13:42:55 +0000
-
-matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Feb 2025 16:35:53 +0000
-
-matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.124.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Feb 2025 11:53:05 +0000
-
-matrix-synapse-py3 (1.123.0) stable; urgency=medium
-
-  * New Synapse release 1.123.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Jan 2025 08:37:34 -0700
-
-matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.123.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Jan 2025 14:39:57 +0100
-
matrix-synapse-py3 (1.122.0) stable; urgency=medium

  * New Synapse release 1.122.0.
@@ -138,13 +138,6 @@ for port in 8080 8081 8082; do
|
|||||||
per_user:
|
per_user:
|
||||||
per_second: 1000
|
per_second: 1000
|
||||||
burst_count: 1000
|
burst_count: 1000
|
||||||
rc_presence:
|
|
||||||
per_user:
|
|
||||||
per_second: 1000
|
|
||||||
burst_count: 1000
|
|
||||||
rc_delayed_event_mgmt:
|
|
||||||
per_second: 1000
|
|
||||||
burst_count: 1000
|
|
||||||
RC
|
RC
|
||||||
)
|
)
|
||||||
echo "${ratelimiting}" >> "$port.config"
|
echo "${ratelimiting}" >> "$port.config"
|
||||||
|
|||||||
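For anyone extending the demo script: the removed settings are plain homeserver YAML appended through the same shell heredoc as the surviving rate limits, so restoring a limit is just another stanza before the `RC` terminator. A minimal sketch, assuming the script's per-port `$port.config` files:

    # Append an extra rate-limiting stanza to a demo homeserver config
    cat >> "$port.config" <<'RC'
    rc_presence:
        per_user:
            per_second: 1000
            burst_count: 1000
    RC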
@@ -20,16 +20,45 @@
|
|||||||
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
|
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
|
||||||
# in `poetry export` in the past.
|
# in `poetry export` in the past.
|
||||||
|
|
||||||
ARG DEBIAN_VERSION=bookworm
|
|
||||||
ARG PYTHON_VERSION=3.12
|
ARG PYTHON_VERSION=3.12
|
||||||
ARG POETRY_VERSION=2.1.1
|
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 0: generate requirements.txt
|
### Stage 0: generate requirements.txt
|
||||||
###
|
###
|
||||||
### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
|
# We hardcode the use of Debian bookworm here because this could change upstream
|
||||||
###
|
# and other Dockerfiles used for testing are expecting bookworm.
|
||||||
FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements
|
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
|
||||||
|
|
||||||
|
# RUN --mount is specific to buildkit and is documented at
|
||||||
|
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||||
|
# Here we use it to set up a cache for apt (and below for pip), to improve
|
||||||
|
# rebuild speeds on slow connections.
|
||||||
|
RUN \
|
||||||
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
|
apt-get update -qq && apt-get install -yqq \
|
||||||
|
build-essential curl git libffi-dev libssl-dev pkg-config \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install rust and ensure its in the PATH.
|
||||||
|
# (Rust may be needed to compile `cryptography`---which is one of poetry's
|
||||||
|
# dependencies---on platforms that don't have a `cryptography` wheel.
|
||||||
|
ENV RUSTUP_HOME=/rust
|
||||||
|
ENV CARGO_HOME=/cargo
|
||||||
|
ENV PATH=/cargo/bin:/rust/bin:$PATH
|
||||||
|
RUN mkdir /rust /cargo
|
||||||
|
|
||||||
|
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
||||||
|
|
||||||
|
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
||||||
|
# set to true, so we expose it as a build-arg.
|
||||||
|
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
||||||
|
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
|
||||||
|
|
||||||
|
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
||||||
|
# synapse's dependencies.
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
pip install --user "poetry==1.3.2"
|
||||||
|
|
||||||
WORKDIR /synapse
|
WORKDIR /synapse
|
||||||
|
|
||||||
@@ -46,18 +75,11 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
 # Instead, we'll just install what a regular `pip install` would from PyPI.
 ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
 
-# This silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-ENV UV_LINK_MODE=copy
-
 # Export the dependencies, but only if we're actually going to use the Poetry lockfile.
 # Otherwise, just create an empty requirements file so that the Dockerfile can
 # proceed.
-ARG POETRY_VERSION
-RUN --mount=type=cache,target=/root/.cache/uv \
-  if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-    uvx --with poetry-plugin-export==1.9.0 \
-      poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
+RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+    /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
   else \
     touch /synapse/requirements.txt; \
   fi
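Both sides of the hunk above implement the same decision: export pinned requirements from the Poetry lockfile unless the `TEST_ONLY_IGNORE_POETRY_LOCKFILE` build-arg is set, in which case an empty `requirements.txt` lets the later stage fall back to plain PyPI installs. A rough Python rendering of that branch, under the assumption of the same environment variable names:

```python
import os
import subprocess

# Sketch only: mirrors the shell conditional in the Dockerfile above.
if not os.environ.get("TEST_ONLY_IGNORE_POETRY_LOCKFILE"):
    # Pinned, hash-checked dependencies straight from poetry.lock.
    subprocess.run(
        ["poetry", "export", "--extras", "all", "-o", "/synapse/requirements.txt"],
        check=True,
    )
else:
    # Empty file: the image will install unpinned deps from PyPI instead.
    open("/synapse/requirements.txt", "w").close()
```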
@@ -65,11 +87,29 @@ RUN --mount=type=cache,target=/root/.cache/uv \
 ###
 ### Stage 1: builder
 ###
-FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
 
+# install the OS build deps
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update -qq && apt-get install -yqq \
+    build-essential \
+    libffi-dev \
+    libjpeg-dev \
+    libpq-dev \
+    libssl-dev \
+    libwebp-dev \
+    libxml++2.6-dev \
+    libxslt1-dev \
+    openssl \
+    zlib1g-dev \
+    git \
+    curl \
+    libicu-dev \
+    pkg-config \
+  && rm -rf /var/lib/apt/lists/*
 
-# This silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-ENV UV_LINK_MODE=copy
-
 # Install rust and ensure it's in the PATH
 ENV RUSTUP_HOME=/rust
@@ -79,6 +119,7 @@ RUN mkdir /rust /cargo
 
 RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
 
+
 # arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
 # set to true, so we expose it as a build-arg.
 ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
@@ -90,8 +131,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
 #
 # This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
 COPY --from=requirements /synapse/requirements.txt /synapse/
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
 
 # Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
@@ -105,36 +146,29 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
 # Install the synapse package itself.
 # If we have populated requirements.txt, we don't install any dependencies
 # as we should already have those from the previous `pip install` step.
-RUN \
-  --mount=type=cache,target=/root/.cache/uv \
-  --mount=type=cache,target=/synapse/target,sharing=locked \
+RUN --mount=type=cache,target=/synapse/target,sharing=locked \
   --mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
   if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-    uv pip install --prefix="/install" --no-deps /synapse[all]; \
+    pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
   else \
-    uv pip install --prefix="/install" /synapse[all]; \
+    pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
   fi
 
 ###
-### Stage 2: runtime dependencies download for ARM64 and AMD64
+### Stage 2: runtime
 ###
-FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
-
-# Tell apt to keep downloaded package files, as we're using cache mounts.
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-# Add both target architectures
-RUN dpkg --add-architecture arm64
-RUN dpkg --add-architecture amd64
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
+
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
 
-# Fetch the runtime dependencies debs for both architectures
-# We do that by building a recursive list of packages we need to download with `apt-cache depends`
-# and then downloading them with `apt-get download`.
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
-  apt-get update -qq && \
-  apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
+  apt-get update -qq && apt-get install -yqq \
     curl \
     gosu \
     libjpeg62-turbo \
@@ -142,48 +176,11 @@ RUN \
     libwebp7 \
     xmlsec1 \
     libjemalloc2 \
-    libicu \
-  | grep '^\w' > /tmp/pkg-list && \
-  for arch in arm64 amd64; do \
-    mkdir -p /tmp/debs-${arch} && \
-    cd /tmp/debs-${arch} && \
-    apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
-  done
-
-# Extract the debs for each architecture
-RUN \
-  for arch in arm64 amd64; do \
-    mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
-    for deb in /tmp/debs-${arch}/*.deb; do \
-        package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
-        echo "Extracting: ${package_name}"; \
-        dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
-        dpkg --extract $deb /install-${arch}; \
-    done; \
-  done
-
-###
-### Stage 3: runtime
-###
-
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
-
-ARG TARGETARCH
-
-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
-LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
-
-# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
-# libraries to the right place, else the `COPY` won't work.
-# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
-# already present in the runtime image.
-COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
-COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
-COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
-COPY --from=runtime-deps /install-${TARGETARCH}/var /var
+    libicu72 \
+    libssl-dev \
+    openssl \
+  && rm -rf /var/lib/apt/lists/*
 COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
 COPY ./docker/conf /conf
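The removed (`-`) side above downloads runtime `.deb` packages for both architectures by expanding a recursive dependency list with `apt-cache depends` and fetching the results with `apt-get download`. A rough Python sketch of the same flow — hypothetical helpers, assuming a Debian environment where the foreign architecture has already been added with `dpkg --add-architecture`:

```python
import re
import subprocess

PACKAGES = ["curl", "gosu", "libjpeg62-turbo", "libwebp7",
            "xmlsec1", "libjemalloc2", "libicu"]

def recursive_depends(packages: list[str]) -> list[str]:
    # Mirrors `apt-cache depends --recurse ... | grep '^\w'`: keep only the
    # package-name lines, dropping indented relationship lines.
    out = subprocess.run(
        ["apt-cache", "depends", "--recurse", "--no-recommends", "--no-suggests",
         "--no-conflicts", "--no-breaks", "--no-replaces", "--no-enhances",
         "--no-pre-depends", *packages],
        check=True, capture_output=True, text=True,
    ).stdout
    return [line for line in out.splitlines() if re.match(r"\w", line)]

def download_debs_for(arch: str, pkg_list: list[str]) -> None:
    # `apt-get download` fetches .deb files without installing them; the
    # APT::Architecture override selects the foreign architecture's packages.
    subprocess.run(
        ["apt-get", "-o", f"APT::Architecture={arch}", "download", *pkg_list],
        check=True,
    )

for arch in ("arm64", "amd64"):
    download_debs_for(arch, recursive_depends(PACKAGES))
```

The Dockerfile then unpacks each `.deb` with `dpkg --extract` and records its control file under `/var/lib/dpkg/status.d/`, so the runtime image still knows which packages it contains.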
@@ -2,38 +2,18 @@
 
 ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
-ARG PYTHON_VERSION=3.12
 
-# first of all, we create a base image with dependencies which we can copy into the
+# first of all, we create a base image with an nginx which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing
 # each time.
 
-FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
+FROM docker.io/library/debian:bookworm-slim AS deps_base
 
-# Tell apt to keep downloaded package files, as we're using cache mounts.
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
 RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && \
   DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-    nginx-light
+    redis-server nginx-light
 
-RUN \
-  # remove default page
-  rm /etc/nginx/sites-enabled/default && \
-  # have nginx log to stderr/out
-  ln -sf /dev/stdout /var/log/nginx/access.log && \
-  ln -sf /dev/stderr /var/log/nginx/error.log
-
-# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
-# (mounted as --mount=type=cache) and the target directory.
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2
-
-RUN mkdir -p /uv/etc/supervisor/conf.d
-
 # Similarly, a base to copy the redis server from.
 #
@@ -41,21 +21,31 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
 # which makes it much easier to copy (but we need to make sure we use an image
 # based on the same debian version as the synapse image, to make sure we get
 # the expected version of libc.
-FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
+FROM docker.io/library/redis:7-bookworm AS redis_base
 
 # now build the final image, based on the regular Synapse docker image
 FROM $FROM
 
-# Copy over dependencies
+# Install supervisord with pip instead of apt, to avoid installing a second
+# copy of python.
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install supervisor~=4.2
+RUN mkdir -p /etc/supervisor/conf.d
+
+# Copy over redis and nginx
 COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
-COPY --from=deps_base /uv /
+
 COPY --from=deps_base /usr/sbin/nginx /usr/sbin
 COPY --from=deps_base /usr/share/nginx /usr/share/nginx
 COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
 COPY --from=deps_base /etc/nginx /etc/nginx
-COPY --from=deps_base /var/log/nginx /var/log/nginx
-# chown to allow non-root user to write to http-*-temp-path dirs
-COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
+RUN rm /etc/nginx/sites-enabled/default
+RUN mkdir /var/log/nginx /var/lib/nginx
+RUN chown www-data /var/lib/nginx
+
+# have nginx log to stderr/out
+RUN ln -sf /dev/stdout /var/log/nginx/access.log
+RUN ln -sf /dev/stderr /var/log/nginx/error.log
 
 # Copy Synapse worker, nginx and supervisord configuration template files
 COPY ./docker/conf-workers/* /conf/
@@ -74,4 +64,4 @@ FROM $FROM
 # Replace the healthcheck with one which checks *all* the workers. The script
 # is generated by configure_workers_and_start.py.
 HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
-    CMD ["/healthcheck.sh"]
+    CMD /bin/sh /healthcheck.sh
@@ -114,9 +114,6 @@ The following environment variables are supported in `run` mode:
   is set via `docker run --user`, defaults to `991`, `991`. Note that this user
   must have permission to read the config files, and write to the data directories.
 * `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
-* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
-* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
-* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as the `no_proxy` environment variable.
 
 For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
@@ -9,9 +9,6 @@
 ARG SYNAPSE_VERSION=latest
 # This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
-
-FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
 
 FROM $FROM
 # First of all, we copy postgres server from the official postgres image,
@@ -23,9 +20,9 @@ FROM $FROM
 # the same debian version as Synapse's docker image (so the versions of the
 # shared libraries match).
 RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
-COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
-COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
-COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
+COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
+COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
+RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
 ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
 ENV PGDATA=/var/lib/postgresql/data
@@ -58,4 +55,4 @@ ENTRYPOINT ["/start_for_complement.sh"]
 
 # Update the healthcheck to have a shorter check interval
 HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
-    CMD ["/healthcheck.sh"]
+    CMD /bin/sh /healthcheck.sh
@@ -5,12 +5,12 @@
 set -e
 
 echo "Complement Synapse launcher"
-echo "  Args: $*"
+echo "  Args: $@"
 echo "  Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
 
 function log {
-    d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
-    echo "$d $*"
+    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
+    echo "$d $@"
 }
 
 # Set the server name of the homeserver
@@ -54,6 +54,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
   export SYNAPSE_WORKER_TYPES="\
       event_persister:2, \
       background_worker, \
+      frontend_proxy, \
      event_creator, \
       user_dir, \
       media_repository, \
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
       client_reader, \
       appservice, \
       pusher, \
-      device_lists:2, \
       stream_writers=account_data+presence+receipts+to_device+typing"
 
 fi
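The `SYNAPSE_WORKER_TYPES` string in the hunks above is a comma-separated list where an optional `:N` suffix (as in `event_persister:2`) requests N instances of that worker type. A rough sketch of how such a string can be parsed — a hypothetical helper, not the actual `parse_worker_types` from `configure_workers_and_start.py`:

```python
from collections import Counter

def parse_worker_types(spec: str) -> Counter:
    """Expand 'event_persister:2, pusher' into {'event_persister': 2, 'pusher': 1}."""
    requested = Counter()
    for entry in spec.split(","):
        entry = entry.strip()
        if not entry:
            continue
        name, _, count = entry.partition(":")
        requested[name] += int(count) if count else 1
    return requested

print(parse_worker_types("event_persister:2, background_worker, pusher"))
# Counter({'event_persister': 2, 'background_worker': 1, 'pusher': 1})
```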
@@ -103,11 +103,12 @@ fi
 # Note that both the key and certificate are in PEM format (not DER).
 
 # First generate a configuration file to set up a Subject Alternative Name.
-echo "\
+cat > /conf/server.tls.conf <<EOF
 .include /etc/ssl/openssl.cnf
 
 [SAN]
-subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
+subjectAltName=DNS:${SERVER_NAME}
+EOF
 
 # Generate an RSA key
 openssl genrsa -out /conf/server.tls.key 2048
@@ -122,12 +123,12 @@ openssl x509 -req -in /conf/server.tls.csr \
   -out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN
 
 # Assert that we have a Subject Alternative Name in the certificate.
-# (the test will exit with 1 here if there isn't a SAN in the certificate.)
-[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
+# (grep will exit with 1 here if there isn't a SAN in the certificate.)
+openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:
 
 export SYNAPSE_TLS_CERT=/conf/server.tls.crt
 export SYNAPSE_TLS_KEY=/conf/server.tls.key
 
 # Run the script that writes the necessary config files and starts supervisord, which in turn
 # starts everything else
-exec /configure_workers_and_start.py "$@"
+exec /configure_workers_and_start.py
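The same SAN assertion can be done without text-scraping the `openssl` output. A small sketch using the `cryptography` package (which Synapse already depends on), assuming the certificate path from the script above:

```python
from cryptography import x509

# Load the PEM certificate generated by the script above.
with open("/conf/server.tls.crt", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())

# Raises ExtensionNotFound if the SAN extension is missing entirely.
san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
dns_names = san.get_values_for_type(x509.DNSName)
assert dns_names, "certificate has no DNS Subject Alternative Names"
print(dns_names)
```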
@@ -89,19 +89,6 @@ rc_invites:
     per_second: 1000
     burst_count: 1000
 
-rc_presence:
-  per_user:
-    per_second: 9999
-    burst_count: 9999
-
-rc_delayed_event_mgmt:
-  per_second: 9999
-  burst_count: 9999
-
-rc_room_creation:
-  per_second: 9999
-  burst_count: 9999
-
 federation_rr_transactions_per_room_per_second: 9999
 
 allow_device_name_lookup_over_federation: true
@@ -131,10 +118,6 @@ experimental_features:
   msc3983_appservice_otk_claims: true
   # Proxy key queries to exclusive ASes
   msc3984_appservice_key_query: true
-  # Invite filtering
-  msc4155_enabled: true
-  # Thread Subscriptions
-  msc4306_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
@@ -151,9 +134,4 @@ caches:
   sync_response_cache_duration: 0
 
 
-# Complement assumes that it can publish to the room list by default.
-room_list_publication_rules:
-  - action: allow
-
-
 {% include "shared-orig.yaml.j2" %}
@@ -1,6 +1,5 @@
 {% if use_forking_launcher %}
 [program:synapse_fork]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/python -m synapse.app.complement_fork_starter
   {{ main_config_path }}
   synapse.app.homeserver
@@ -21,7 +20,6 @@ exitcodes=0
 
 {% else %}
 [program:synapse_main]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
   --config-path="{{ main_config_path }}"
   --config-path=/conf/workers/shared.yaml
@@ -38,7 +36,6 @@ exitcodes=0
 
 {% for worker in workers %}
 [program:synapse_{{ worker.name }}]
-environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
 command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
   --config-path="{{ main_config_path }}"
   --config-path=/conf/workers/shared.yaml
@@ -77,13 +77,6 @@ loggers:
     #}
     synapse.visibility.filtered_event_debug:
         level: DEBUG
-
-    {#
-      If Synapse is under test, we don't care about seeing the "Applying schema" log
-      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
-    #}
-    synapse.storage.prepare_database:
-        level: WARN
 {% endif %}
 
 root:
@@ -1,4 +1,4 @@
-#!/usr/local/bin/python
+#!/usr/bin/env python
 #
 # This file is licensed under the Affero General Public License (AGPL) version 3.
 #
@@ -178,9 +178,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
-            "^/_matrix/client/(r0|v3)/delete_devices$",
             "^/_matrix/client/versions$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
             "^/_matrix/client/(r0|v3|unstable)/register$",
@@ -197,9 +194,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
             "^/_matrix/client/(r0|v3|unstable)/capabilities$",
             "^/_matrix/client/(r0|v3|unstable)/notifications$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
-            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",
@@ -208,7 +202,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["federation"],
         "endpoint_patterns": [
-            "^/_matrix/federation/v1/version$",
             "^/_matrix/federation/(v1|v2)/event/",
             "^/_matrix/federation/(v1|v2)/state/",
             "^/_matrix/federation/(v1|v2)/state_ids/",
@@ -271,6 +264,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
+    "frontend_proxy": {
+        "app": "synapse.app.generic_worker",
+        "listener_resources": ["client", "replication"],
+        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
+        "shared_extra_conf": {},
+        "worker_extra_conf": "",
+    },
     "account_data": {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["client", "replication"],
@@ -305,13 +305,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
-    "device_lists": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
     "typing": {
         "app": "synapse.app.generic_worker",
         "listener_resources": ["client", "replication"],
@@ -328,15 +321,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "shared_extra_conf": {},
         "worker_extra_conf": "",
     },
-    "thread_subscriptions": {
-        "app": "synapse.app.generic_worker",
-        "listener_resources": ["client", "replication"],
-        "endpoint_patterns": [
-            "^/_matrix/client/unstable/io.element.msc4306/.*",
-        ],
-        "shared_extra_conf": {},
-        "worker_extra_conf": "",
-    },
 }
 
 # Templates for sections that may be inserted multiple times in config files
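Every entry in `WORKERS_CONFIG` follows the same shape, as the hunks above show. A minimal sketch of what the script consumes for one worker type (the endpoint pattern here is illustrative, not copied from the file):

```python
WORKERS_CONFIG = {
    "typing": {
        # Which Synapse application to launch for this worker type.
        "app": "synapse.app.generic_worker",
        # HTTP resources the worker's listener should expose.
        "listener_resources": ["client", "replication"],
        # Regexes for the endpoints nginx should route to this worker.
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/typing"
        ],
        # Extra config merged into the shared homeserver config.
        "shared_extra_conf": {},
        # Extra lines appended to the worker's own config file.
        "worker_extra_conf": "",
    },
}
```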
@@ -367,11 +351,6 @@ def error(txt: str) -> NoReturn:
 
 
 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()
@@ -397,10 +376,8 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
     #
     # We use append mode in case the files have already been written to by something else
     # (for instance, as part of the instructions in a dockerfile).
-    exists = os.path.isfile(dst)
     with open(dst, "a") as outfile:
         # In case the existing file doesn't end with a newline
-        if exists:
-            outfile.write("\n")
+        outfile.write("\n")
 
         outfile.write(rendered)
@@ -427,18 +404,16 @@ def add_worker_roles_to_shared_config(
     # streams
     instance_map = shared_config.setdefault("instance_map", {})
 
-    # This is a list of the stream_writers.
-    stream_writers = {
+    # This is a list of the stream_writers that there can be only one of. Events can be
+    # sharded, and therefore doesn't belong here.
+    singular_stream_writers = [
         "account_data",
-        "events",
-        "device_lists",
         "presence",
         "receipts",
         "to_device",
         "typing",
         "push_rules",
-        "thread_subscriptions",
-    }
+    ]
 
     # Worker-type specific sharding config. Now a single worker can fulfill multiple
     # roles, check each.
@@ -448,11 +423,28 @@ def add_worker_roles_to_shared_config(
     if "federation_sender" in worker_types_set:
         shared_config.setdefault("federation_sender_instances", []).append(worker_name)
 
+    if "event_persister" in worker_types_set:
+        # Event persisters write to the events stream, so we need to update
+        # the list of event stream writers
+        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
+            worker_name
+        )
+
+        # Map of stream writer instance names to host/ports combos
+        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
+            instance_map[worker_name] = {
+                "path": f"/run/worker.{worker_port}",
+            }
+        else:
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
+
     # Update the list of stream writers. It's convenient that the name of the worker
     # type is the same as the stream to write. Iterate over the whole list in case there
     # is more than one.
     for worker in worker_types_set:
-        if worker in stream_writers:
+        if worker in singular_stream_writers:
             shared_config.setdefault("stream_writers", {}).setdefault(
                 worker, []
             ).append(worker_name)
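To make the effect of `add_worker_roles_to_shared_config` concrete: for two `event_persister` workers plus one `typing` worker, the shared config ends up looking roughly like this. Worker names and ports are illustrative, and the `instance_map` entries for singular stream writers are filled in by a part of the script outside these hunks:

```python
# Hypothetical result for event_persister1/2 (ports 18011/18012) and
# typing1 (port 18013), without SYNAPSE_USE_UNIX_SOCKET set:
shared_config = {
    "instance_map": {
        "event_persister1": {"host": "localhost", "port": 18011},
        "event_persister2": {"host": "localhost", "port": 18012},
        "typing1": {"host": "localhost", "port": 18013},
    },
    "stream_writers": {
        # The events stream is sharded across both persisters...
        "events": ["event_persister1", "event_persister2"],
        # ...while each singular stream gets exactly one writer.
        "typing": ["typing1"],
    },
}
```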
@@ -612,7 +604,7 @@ def generate_base_homeserver_config() -> None:
     # start.py already does this for us, so just call that.
     # note that this script is copied in the official, monolith dockerfile
     os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
-    subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
+    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
@@ -876,13 +868,6 @@ def generate_worker_files(
         else:
             healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
 
-        # Special case for event_persister: those are just workers that write to
-        # the `events` stream. For other workers, the worker name is the same
-        # as the name of the stream they write to, but for some reason it is not
-        # the case for event_persister.
-        if "event_persister" in worker_types_set:
-            worker_types_set.add("events")
-
         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(
             shared_config, worker_types_set, worker_name, worker_port
@@ -1013,7 +998,6 @@ def generate_worker_files(
         "/healthcheck.sh",
         healthcheck_urls=healthcheck_urls,
     )
-    os.chmod("/healthcheck.sh", 0o755)
 
     # Ensure the logging directory exists
     log_dir = data_dir + "/logs"
@@ -1115,13 +1099,6 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     else:
         log("Could not find %s, will not use" % (jemallocpath,))
 
-    # Empty strings are falsy in Python so this default is fine. We just can't have these
-    # be undefined because supervisord will complain about our
-    # `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
-    environ.setdefault("SYNAPSE_HTTP_PROXY", "")
-    environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
-    environ.setdefault("SYNAPSE_NO_PROXY", "")
-
     # Start supervisord, which will start Synapse, all of the configured worker
     # processes, redis, nginx etc. according to the config we created above.
     log("Starting supervisord")
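These `setdefault` calls pair with the `environment=... %(ENV_SYNAPSE_HTTP_PROXY)s ...` lines removed from `supervisord.conf.j2` earlier: supervisord expands `%(ENV_X)s` from the environment at config-parse time and errors out if the variable is undefined, so the launcher has to guarantee the keys exist. A minimal sketch of that guarantee:

```python
import os

# Empty strings are falsy, so "unset" and "" behave the same downstream,
# but supervisord's %(ENV_...)s expansion no longer fails on missing keys.
for var in ("SYNAPSE_HTTP_PROXY", "SYNAPSE_HTTPS_PROXY", "SYNAPSE_NO_PROXY"):
    os.environ.setdefault(var, "")
```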
@@ -10,9 +10,6 @@
 # '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
 # stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
 # confusion due to interleaving of the different processes.
-prefixer() {
-    mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
-}
-exec 1> >(prefixer)
-exec 2> >(prefixer >&2)
+exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
+exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
 exec "$@"
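The `prefix-log` wrapper above tags every stdout/stderr line of a child process with the supervisor process name, flushing per line so interleaved workers stay readable. A rough Python equivalent of the same idea, using the env var name from the script (run as `prefix.py <command> [args...]`):

```python
import os
import subprocess
import sys

prefix = os.environ.get("SUPERVISOR_PROCESS_NAME", "proc")

# Run the child and prefix each output line as it arrives; flush=True plays
# the role the mawk/awk '-W interactive' flag plays in the shell version.
with subprocess.Popen(
    sys.argv[1:], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
) as child:
    assert child.stdout is not None
    for line in child.stdout:
        print(f"{prefix} | {line}", end="", flush=True)
sys.exit(child.returncode)
```

Unlike the shell version, this sketch merges stderr into stdout; keeping them separate needs two pipes and a select loop.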
@@ -22,11 +22,6 @@ def error(txt: str) -> NoReturn:
 
 
 def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
     sys.stdout.flush()
     sys.stderr.flush()
@@ -63,18 +63,6 @@ mdbook serve
 
 The URL at which the docs can be viewed will be logged.
 
-## Synapse configuration documentation
-
-The [Configuration
-Manual](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html)
-page is generated from a YAML file,
-[schema/synapse-config.schema.yaml](../schema/synapse-config.schema.yaml). To
-add new options or modify existing ones, first edit that file, then run
-[scripts-dev/gen_config_documentation.py](../scripts-dev/gen_config_documentation.py)
-to generate an updated Configuration Manual markdown file.
-
-Build the book as described above to preview it in a web browser.
-
 ## Configuration and theming
 
 The look and behaviour of the website is configured by the [book.toml](../book.toml) file
@@ -49,8 +49,6 @@
   - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
   - [Account data callbacks](modules/account_data_callbacks.md)
   - [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
-  - [Media repository callbacks](modules/media_repository_callbacks.md)
-  - [Ratelimit callbacks](modules/ratelimit_callbacks.md)
   - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
 - [Workers](workers.md)
   - [Using `synctl` with Workers](synctl_workers.md)
@@ -68,13 +66,11 @@
   - [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
   - [Manipulate Room Membership](admin_api/room_membership.md)
   - [Rooms](admin_api/rooms.md)
-  - [Scheduled tasks](admin_api/scheduled_tasks.md)
   - [Server Notices](admin_api/server_notices.md)
   - [Statistics](admin_api/statistics.md)
   - [Users](admin_api/user_admin_api.md)
   - [Server Version](admin_api/version_api.md)
   - [Federation](usage/administration/admin_api/federation.md)
-  - [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
   - [Manhole](manhole.md)
   - [Monitoring](metrics-howto.md)
   - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
@@ -1,67 +0,0 @@
-# Client-Server API Extensions
-
-Server administrators can set special account data to change how the Client-Server API behaves for
-their clients. Setting the account data, or having it already set, as a non-admin has no effect.
-
-All configuration options can be set through the `io.element.synapse.admin_client_config` global
-account data on the admin's user account.
-
-Example:
-```
-PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
-{
-  "return_soft_failed_events": true
-}
-```
-
-## See soft failed events
-
-Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
-
-To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
-to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
-exclude soft failed events.
-
-**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
-in the event's `unsigned` content like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `false`
-
-## See events marked spammy by policy servers
-
-Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
-
-Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
-flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
-to `true`.
-
-`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
-policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
-is always `true`.
-
-Events which were flagged by the policy server will be flagged as `io.element.synapse.policy_server_spammy` in the
-event's `unsigned` content, like so:
-
-```json
-{
-  "type": "m.room.message",
-  "other": "event_fields_go_here",
-  "unsigned": {
-    "io.element.synapse.soft_failed": true,
-    "io.element.synapse.policy_server_spammy": true
-  }
-}
-```
-
-Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
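For illustration, this is roughly how an admin would set the account-data switch described in the deleted page above over the Client-Server API. The homeserver URL and token are hypothetical, and the `requests` package is assumed to be available:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # hypothetical
ADMIN_USER = "@admin:example.com"          # hypothetical
TOKEN = "..."                              # an access token for the admin user

# Global account data event holding the admin client config.
resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{ADMIN_USER}/account_data/"
    "io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```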
@@ -117,6 +117,7 @@ It returns a JSON body like the following:
     "hashes": {
         "sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
     },
+    "origin": "matrix.org",
     "origin_server_ts": 1592291711430,
     "prev_events": [
         "$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
@@ -46,14 +46,6 @@ to any local media, and any locally-cached copies of remote media.
 
 The media file itself (and any thumbnails) is not deleted from the server.
 
-Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media
-is quarantined, Synapse will:
-
-- Quarantine any media with a matching hash that has already been uploaded.
-- Quarantine any future media.
-- Quarantine any existing cached remote media.
-- Quarantine any future remote media.
-
 ## Quarantining media by ID
 
 This API quarantines a single piece of local or remote media.
@@ -794,7 +794,6 @@ A response body like the following is returned:
     "results": [
         {
             "delete_id": "delete_id1",
-            "room_id": "!roomid:example.com",
             "status": "failed",
             "error": "error message",
             "shutdown_room": {
@@ -805,8 +804,7 @@ A response body like the following is returned:
             }
         }, {
             "delete_id": "delete_id2",
-            "room_id": "!roomid:example.com",
-            "status": "active",
+            "status": "purging",
             "shutdown_room": {
                 "kicked_users": [
                     "@foobar:example.com"
@@ -843,9 +841,7 @@ A response body like the following is returned:
 
 ```json
 {
-    "status": "active",
-    "delete_id": "bHkCNQpHqOaFhPtK",
-    "room_id": "!roomid:example.com",
+    "status": "purging",
     "shutdown_room": {
         "kicked_users": [
             "@foobar:example.com"
@@ -873,11 +869,10 @@ The following fields are returned in the JSON response body:
 - `results` - An array of objects, each containing information about one task.
   This field is omitted from the result when you query by `delete_id`.
   Task objects contain the following fields:
-  - `delete_id` - The ID for this purge
-  - `room_id` - The ID of the room being deleted
+  - `delete_id` - The ID for this purge if you query by `room_id`.
   - `status` - The status will be one of:
-    - `scheduled` - The deletion is waiting to be started
-    - `active` - The process is purging the room and event data from the database.
+    - `shutting_down` - The process is removing users from the room.
+    - `purging` - The process is purging the room and event data from the database.
     - `complete` - The process has completed successfully.
     - `failed` - The process is aborted, an error has occurred.
 - `error` - A string that shows an error message if `status` is `failed`.
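As a worked illustration of the status API documented above, a client might poll a delete task until it reaches a terminal state. Homeserver URL and token are hypothetical; the check on terminal states works for the status names on either side of this diff:

```python
import time
import requests

HOMESERVER = "https://matrix.example.com"  # hypothetical
TOKEN = "..."                              # admin access token

def wait_for_delete(delete_id: str) -> dict:
    # Poll the v2 delete-status endpoint until the task leaves its
    # in-progress states and reaches `complete` or `failed`.
    while True:
        resp = requests.get(
            f"{HOMESERVER}/_synapse/admin/v2/rooms/delete_status/{delete_id}",
            headers={"Authorization": f"Bearer {TOKEN}"},
        )
        resp.raise_for_status()
        body = resp.json()
        if body["status"] in ("complete", "failed"):
            return body
        time.sleep(5)
```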
@@ -1,54 +0,0 @@
-# Show scheduled tasks
-
-This API returns information about scheduled tasks.
-
-To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api/).
-
-The API is:
-```
-GET /_synapse/admin/v1/scheduled_tasks
-```
-
-It returns a JSON body like the following:
-
-```json
-{
-    "scheduled_tasks": [
-        {
-            "id": "GSA124oegf1",
-            "action": "shutdown_room",
-            "status": "complete",
-            "timestamp_ms": 23423523,
-            "resource_id": "!roomid",
-            "result": "some result",
-            "error": null
-        }
-    ]
-}
-```
-
-**Query parameters:**
-
-* `action_name`: string - Optional. Returns only the scheduled tasks with the given action name.
-* `resource_id`: string - Optional. Returns only the scheduled tasks with the given resource id.
-* `status`: string - Optional. Returns only the scheduled tasks matching the given status, one of
-  - "scheduled" - Task is scheduled but not active
-  - "active" - Task is active and probably running, and if not will be run on next scheduler loop run
-  - "complete" - Task has completed successfully
-  - "failed" - Task is over and either returned a failed status, or had an exception
-
-* `max_timestamp`: int - Optional. Returns only the scheduled tasks with a timestamp earlier than the specified one.
-
-**Response**
-
-The following fields are returned in the JSON response body along with a `200` HTTP status code:
-
-* `id`: string - ID of scheduled task.
-* `action`: string - The name of the scheduled task's action.
-* `status`: string - The status of the scheduled task.
-* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task - If the status is "scheduled" then this represents when it should be launched.
-  Otherwise it represents the last time this task got a change of state.
-* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one
-* `result`: Optional JSON - Any result of the scheduled task, if given
-* `error`: Optional string - If the task has the status "failed", the error associated with this failure
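As a usage note for the endpoint removed above, a minimal sketch of listing failed `shutdown_room` tasks with the documented query parameters; the homeserver URL and token are placeholders.

```python
# Query the scheduled tasks admin API shown above, filtering with its
# documented query parameters. URL and token are placeholders.
import requests

resp = requests.get(
    "https://synapse.example.com/_synapse/admin/v1/scheduled_tasks",
    params={"action_name": "shutdown_room", "status": "failed"},
    headers={"Authorization": "Bearer <admin_access_token>"},
)
resp.raise_for_status()
for task in resp.json()["scheduled_tasks"]:
    print(task["id"], task["action"], task["status"], task.get("error"))
```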
@@ -163,8 +163,7 @@ Body parameters:
 - `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
 - `user_type` - **string** or null, optional. If not provided, the user type will
   not be changed. If `null` is given, the user type will be cleared.
-  Other allowed options are: `bot` and `support` and any extra values defined in the homeserver
-  [configuration](../usage/configuration/config_documentation.md#user_types).
+  Other allowed options are: `bot` and `support`.

 ## List Accounts
 ### List Accounts (V2)
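A hedged example of the `user_type` semantics above: sending JSON `null` clears the type. The v2 user endpoint path and credentials are assumptions for illustration.

```python
# Set, then clear, a user's type via the user admin API discussed above.
# Endpoint path and token are assumed placeholders.
import requests

URL = "https://synapse.example.com/_synapse/admin/v2/users/@helper:example.com"
HEADERS = {"Authorization": "Bearer <admin_access_token>"}

requests.put(URL, json={"user_type": "bot"}, headers=HEADERS).raise_for_status()
requests.put(URL, json={"user_type": None}, headers=HEADERS).raise_for_status()  # JSON null clears it
```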
@@ -415,32 +414,6 @@ The following actions are **NOT** performed. The list may be incomplete.
 - Remove from monthly active users
 - Remove user's consent information (consent version and timestamp)

-## Suspend/Unsuspend Account
-
-This API allows an admin to suspend/unsuspend an account. While an account is suspended, the user is
-prohibited from sending invites, joining or knocking on rooms, sending messages, changing profile data, and redacting messages other than their own.
-
-The API is:
-
-```
-PUT /_synapse/admin/v1/suspend/<user_id>
-```
-
-with a body of:
-
-```json
-{
-    "suspend": true
-}
-```
-
-To unsuspend a user, use the same endpoint with a body of:
-
-```json
-{
-    "suspend": false
-}
-```
-
 ## Reset password

 **Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
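Since the removed section above gives the endpoint and body verbatim, a short sketch of driving it from Python follows; only the homeserver URL and token are placeholders.

```python
# Suspend and unsuspend an account using the endpoint shown above.
import requests

BASE = "https://synapse.example.com"
HEADERS = {"Authorization": "Bearer <admin_access_token>"}


def set_suspended(user_id: str, suspend: bool) -> None:
    resp = requests.put(
        f"{BASE}/_synapse/admin/v1/suspend/{user_id}",
        json={"suspend": suspend},
        headers=HEADERS,
    )
    resp.raise_for_status()


set_suspended("@alice:example.com", True)   # suspend
set_suspended("@alice:example.com", False)  # unsuspend
```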
@@ -955,8 +928,7 @@ A response body like the following is returned:
       "last_seen_ip": "1.2.3.4",
       "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
       "last_seen_ts": 1474491775024,
-      "user_id": "<user_id>",
-      "dehydrated": false
+      "user_id": "<user_id>"
     },
     {
       "device_id": "AUIECTSRND",
@@ -964,8 +936,7 @@ A response body like the following is returned:
       "last_seen_ip": "1.2.3.5",
       "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
       "last_seen_ts": 1474491775025,
-      "user_id": "<user_id>",
-      "dehydrated": false
+      "user_id": "<user_id>"
     }
   ],
   "total": 2
@@ -995,7 +966,6 @@ The following fields are returned in the JSON response body:
 - `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
   device was last seen. (May be a few minutes out of date, for efficiency reasons).
 - `user_id` - Owner of device.
-- `dehydrated` - Whether the device is a dehydrated device.

 - `total` - Total number of user's devices.
@@ -1464,11 +1434,8 @@ _Added in Synapse 1.72.0._

 ## Redact all the events of a user

-This endpoint allows an admin to redact the events of a given user. There are no restrictions on
-redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
-Redactions for non-local users are issued using the admin user, and will fail in rooms where the
-admin user is not admin/does not have the specified power level to issue redactions. An option
-is provided to override the default and allow the admin to issue the redactions in all cases.
+This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
+local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.

 The API is
 ```
@@ -1501,15 +1468,12 @@ The following JSON body parameter must be provided:
 - `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
   the user is a member of will be redacted

+_Added in Synapse 1.116.0._
+
 The following JSON body parameters are optional:

 - `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
-- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
-- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
-  when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
-  where the admin does not have the sufficient power level to issue the redactions.
-
-_Added in Synapse 1.116.0._
+- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided


 ## Check the status of a redaction process
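For orientation, a sketch of requesting a bulk redaction with the optional body parameters listed above. The `POST /_synapse/admin/v1/user/<user_id>/redact` path is a hypothetical reading of the elided fence, and the URL and token are placeholders.

```python
# Request redaction of a user's events using the body parameters documented
# above. The endpoint path is an assumption; verify it against your docs.
import requests

resp = requests.post(
    "https://synapse.example.com/_synapse/admin/v1/user/@spammer:example.com/redact",
    json={
        "rooms": [],       # empty list = all rooms the user is a member of
        "reason": "spam",  # included in each redaction event, visible to users
        "limit": 1000,
    },
    headers={"Authorization": "Bearer <admin_access_token>"},
)
resp.raise_for_status()
print(resp.json())  # expected to contain an ID to poll the redaction status with
```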
@@ -1,11 +1,13 @@
-# Deprecation Policy
+Deprecation Policy for Platform Dependencies
+============================================

-Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
-and **application dependencies** (Python and Rust packages). This document outlines the
-policy towards which versions we support, and when we drop support for versions in the
-future.
+Synapse has a number of platform dependencies, including Python, Rust,
+PostgreSQL and SQLite. This document outlines the policy towards which versions
+we support, and when we drop support for versions in the future.

-## Platform Dependencies
+Policy
+------

 Synapse follows the upstream support life cycles for Python and PostgreSQL,
 i.e. when a version reaches End of Life Synapse will withdraw support for that
@@ -24,8 +26,8 @@ The oldest supported version of SQLite is the version
 [provided](https://packages.debian.org/bullseye/libsqlite3-0) by
 [Debian oldstable](https://wiki.debian.org/DebianOldStable).

-### Context
+Context
+-------

 It is important for system admins to have a clear understanding of the platform
 requirements of Synapse and its deprecation policies so that they can
@@ -49,41 +51,3 @@ On a similar note, SQLite does not generally have a concept of "supported
 release"; bugfixes are published for the latest minor release only. We chose to
 track Debian's oldstable as this is relatively conservative, predictably updated
 and is consistent with the `.deb` packages released by Matrix.org.
-
-
-## Application dependencies
-
-For application-level Python dependencies, we often specify loose version constraints
-(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
-are only added when necessary to prevent known incompatibilities.
-
-When selecting a minimum version, while we are mindful of the impact on downstream
-package maintainers, our primary focus is on the maintainability and progress of Synapse
-itself.
-
-For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
-available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
-[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
-extra scrutiny or consideration at this point.
-
-We aggressively update Rust dependencies. Since these are statically linked and managed
-entirely by `cargo` during build, they *can* pose no ongoing maintenance burden on others.
-This allows us to freely upgrade to leverage the latest ecosystem advancements assuming
-they don't have their own system-level dependencies.
-
-
-### Context
-
-Because Python dependencies can easily be managed in a virtual environment, we are less
-concerned about the criteria for selecting minimum versions. The only thing of concern
-is making sure we're not making it unnecessarily difficult for downstream package
-maintainers. Generally, this just means avoiding the bleeding edge for a few months.
-
-The situation for Rust dependencies is fundamentally different. For packagers, the
-concerns around Python dependency versions do not apply. The `cargo` tool handles
-downloading and building all libraries to satisfy dependencies, and these libraries are
-statically linked into the final binary. This means that from a packager's perspective,
-the Rust dependency versions are an internal build detail, not a runtime dependency to
-be managed on the target system. Consequently, we have even greater flexibility to
-upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
-package Rust libraries, but this appears to be the outlier rather than the norm.
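To make the loose-constraint policy above concrete, a small illustration using the `packaging` library (an assumption: this library is not referenced in the diff itself): a `>=` bound stays forwards compatible, while an upper bound only excludes known-bad versions.

```python
# Demonstrates how a loose constraint accepts future releases while an upper
# bound fences off a known incompatibility. Version numbers are illustrative.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

loose = SpecifierSet(">=1.2.0")                # forwards compatible
pinned = SpecifierSet(">=1.2.0,<2.0.0")        # upper bound for a known break

assert Version("1.9.3") in loose
assert Version("2.1.0") in loose               # future versions accepted
assert Version("2.1.0") not in pinned          # excluded by the upper bound
```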
@@ -29,6 +29,8 @@ easiest way of installing the latest version is to use [rustup](https://rustup.rs)

 Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.

+Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
+
 The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).

 For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
@@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency information:
 * Whether the update requires a previous update to be complete.
 * A rough ordering for which to complete updates.

-A new background update needs to be added to the `background_updates` table:
+A new background updates needs to be added to the `background_updates` table:

 ```sql
 INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
@@ -150,25 +150,6 @@ $ poetry shell
 $ poetry install --extras all
 ```

-If you want to go even further and remove the Poetry caches:
-
-```shell
-# Find your Poetry cache directory
-# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
-$ poetry config cache-dir
-
-# Remove packages from all cached repositories
-$ poetry cache clear --all .
-
-# Go completely nuclear and clear out everything Poetry cache related
-# including the wheel artifacts which is not covered by the above command
-# (see https://github.com/python-poetry/poetry/issues/10304)
-#
-# This is necessary in order to rebuild or fetch new wheels.
-$ rm -rf $(poetry config cache-dir)
-```
-
-
 ## ...run a command in the `poetry` virtualenv?

 Use `poetry run cmd args` when you need the python virtualenv context.
@@ -206,7 +187,7 @@ useful.
 ## ...add a new dependency?

 Either:
-- manually update `pyproject.toml`; then `poetry lock`; or else
+- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
 - `poetry add packagename`. See `poetry add --help`; note the `--dev`,
   `--extras` and `--optional` flags in particular.

@@ -221,12 +202,12 @@ poetry remove packagename
 ```

 ought to do the trick. Alternatively, manually update `pyproject.toml` and
-`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock`
+`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
 files in your commit.

 ## ...update the version range for an existing dependency?

-Best done by manually editing `pyproject.toml`, then `poetry lock`.
+Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
 Include the updated `pyproject.toml` and `poetry.lock` in your commit.

 ## ...update a dependency in the locked environment?
@@ -252,7 +233,7 @@ poetry add packagename==1.2.3

 # Get poetry to recompute the content-hash of pyproject.toml without changing
 # the locked package versions.
-poetry lock
+poetry lock --no-update
 ```

 Either way, include the updated `poetry.lock` file in your commit.
@@ -59,28 +59,6 @@ def do_request_handling():
     logger.debug("phew")
 ```

-### The `sentinel` context
-
-The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty
-sentinel value to represent the root logcontext. This is what is used when there is no
-other logcontext set. The phrase "clear/reset the logcontext" means to set the current
-logcontext to the `sentinel` logcontext.
-
-No CPU/database usage metrics are recorded against the `sentinel` logcontext.
-
-Ideally, nothing from the Synapse homeserver would be logged against the `sentinel`
-logcontext as we want to know which server the logs came from. In practice, this is not
-always the case yet, especially outside of request handling.
-
-Global things outside of Synapse (e.g. Twisted reactor code) should run in the
-`sentinel` logcontext. It's only when it calls into application code that a logcontext
-gets activated. This means the reactor should be started in the `sentinel` logcontext,
-and any time an awaitable yields control back to the reactor, it should reset the
-logcontext to be the `sentinel` logcontext. This is important to avoid leaking the
-current logcontext to the reactor (which would then get picked up and associated with
-the next thing the reactor does).
-
-
 ## Using logcontexts with awaitables

 Awaitables break the linear flow of code so that there is no longer a single entry point
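A minimal sketch of the sentinel-reset behaviour described above, assuming the helpers in `synapse.logging.context`; the request name and deferred are illustrative only.

```python
# Avoid leaking a logcontext to the reactor: make_deferred_yieldable resets the
# current logcontext to the sentinel while the reactor owns control, and
# restores it when the deferred completes.
from synapse.logging.context import LoggingContext, make_deferred_yieldable


async def handle_request(deferred):
    with LoggingContext("request-1234"):
        # While awaiting, the logcontext is the sentinel; on resume it is
        # "request-1234" again, so metrics and log lines stay attributed.
        result = await make_deferred_yieldable(deferred)
        return result
```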
@@ -1,131 +0,0 @@
-# Media repository callbacks
-
-Media repository callbacks allow module developers to customise the behaviour of the
-media repository on a per user basis. Media repository callbacks can be registered
-using the module API's `register_media_repository_callbacks` method.
-
-The available media repository callbacks are:
-
-### `get_media_config_for_user`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def get_media_config_for_user(user_id: str) -> Optional[JsonDict]
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when processing a request from a client for the
-[media config endpoint](https://spec.matrix.org/latest/client-server-api/#get_matrixclientv1mediaconfig).
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-
-If the callback returns a dictionary then it will be used as the body of the response to the
-client.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `None`, Synapse falls through to the next one. The value of the first
-callback that does not return `None` will be used. If this happens, Synapse will not call
-any of the subsequent implementations of this callback.
-
-If no module returns a non-`None` value then the default media config will be returned.
-
-### `is_user_allowed_to_upload_media_of_size`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def is_user_allowed_to_upload_media_of_size(user_id: str, size: int) -> bool
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called before media is accepted for upload from a user, in case the module needs to
-enforce a different limit for the particular user.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-* `size`: The size in bytes of media that is being requested to upload.
-
-If the module returns `False`, the current request will be denied with the error code
-`M_TOO_LARGE` and the HTTP status code 413.
-
-If multiple modules implement this callback, they will be considered in order. If a callback
-returns `True`, Synapse falls through to the next one. The value of the first callback that
-returns `False` will be used. If this happens, Synapse will not call any of the subsequent
-implementations of this callback.
-
-### `get_media_upload_limits_for_user`
-
-_First introduced in Synapse v1.139.0_
-
-```python
-async def get_media_upload_limits_for_user(user_id: str, size: int) -> Optional[List[synapse.module_api.MediaUploadLimit]]
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when processing a request to store content in the media repository. This can be used to dynamically override
-the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-
-If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).
-
-If an empty list is returned then no limits are applied (**warning:** users will be able
-to upload as much data as they desire).
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `None`, Synapse falls through to the next one. The value of the first
-callback that does not return `None` will be used. If this happens, Synapse will not call
-any of the subsequent implementations of this callback.
-
-If there are no registered modules, or if all modules return `None`, then the default
-[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
-will be used.
-
-### `on_media_upload_limit_exceeded`
-
-_First introduced in Synapse v1.139.0_
-
-```python
-async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when a user attempts to upload media that would exceed a
-[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).
-
-This callback will only be called on workers which handle
-[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
-requests.
-
-This could be used to inform the user that they have reached a media upload limit through
-some external method.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
-* `sent_bytes`: The number of bytes already sent during the period of the limit.
-* `attempted_bytes`: The number of bytes that the user attempted to send.
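A hedged sketch of a module wiring up two of the callbacks documented above via `register_media_repository_callbacks`; the class name and the 100 MiB figure are illustrative assumptions.

```python
# Example module implementing two media repository callbacks from the removed
# page above. Returning None from get_media_config_for_user falls through to
# the next module / the default config, per the documented semantics.
from typing import Optional

from synapse.module_api import ModuleApi


class MediaPolicyModule:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_media_repository_callbacks(
            get_media_config_for_user=self.get_media_config_for_user,
            is_user_allowed_to_upload_media_of_size=self.may_upload,
        )

    async def get_media_config_for_user(self, user_id: str) -> Optional[dict]:
        return None  # use the default media config for everyone

    async def may_upload(self, user_id: str, size: int) -> bool:
        # Returning False rejects the upload with M_TOO_LARGE / HTTP 413.
        return size <= 100 * 1024 * 1024  # illustrative 100 MiB cap
```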
@@ -1,43 +0,0 @@
-# Ratelimit callbacks
-
-Ratelimit callbacks allow module developers to override ratelimit settings dynamically whilst
-Synapse is running. Ratelimit callbacks can be registered using the module API's
-`register_ratelimit_callbacks` method.
-
-The available ratelimit callbacks are:
-
-### `get_ratelimit_override_for_user`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> Optional[synapse.module_api.RatelimitOverride]
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when constructing a ratelimiter of a particular type for a user. The module can
-return a `messages_per_second` and `burst_count` to be used, or `None` if
-the default settings are adequate. The user is represented by their Matrix user ID
-(e.g. `@alice:example.com`). The limiter name is usually taken from the `RatelimitSettings` key
-value.
-
-The limiters that are currently supported are:
-
-- `rc_invites.per_room`
-- `rc_invites.per_user`
-- `rc_invites.per_issuer`
-
-The `RatelimitOverride` return type has the following fields:
-
-- `per_second: float`. The number of actions that can be performed in a second. `0.0` means that ratelimiting is disabled.
-- `burst_count: int`. The number of actions that can be performed before being limited.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `None`, Synapse falls through to the next one. The value of the first
-callback that does not return `None` will be used. If this happens, Synapse will not call
-any of the subsequent implementations of this callback. If no module returns a non-`None` value
-then the default settings will be used.
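A sketch of a module returning a `RatelimitOverride` for one of the supported limiters listed above; the user ID and numbers are illustrative, and returning `None` leaves other users on the defaults.

```python
# Example ratelimit callback: loosen the per-user invite limit for one bridge
# account, fall through (None) for everyone else.
from typing import Optional

from synapse.module_api import ModuleApi, RatelimitOverride


class InviteRatelimitModule:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_ratelimit_callbacks(
            get_ratelimit_override_for_user=self.get_override,
        )

    async def get_override(
        self, user: str, limiter_name: str
    ) -> Optional[RatelimitOverride]:
        if limiter_name == "rc_invites.per_user" and user == "@bridge:example.com":
            # per_second=0.0 would disable ratelimiting entirely.
            return RatelimitOverride(per_second=10.0, burst_count=50)
        return None  # defaults apply
```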
@@ -80,8 +80,6 @@ Called when processing an invitation, both when one is created locally or when
 receiving an invite over federation. Both inviter and invitee are represented by
 their Matrix user ID (e.g. `@alice:example.com`).

-Note that federated invites will call `federated_user_may_invite` before this callback.
-
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
@@ -99,34 +97,6 @@ be used. If this happens, Synapse will not call any of the subsequent implementa
 this callback.


-### `federated_user_may_invite`
-
-_First introduced in Synapse v1.133.0_
-
-```python
-async def federated_user_may_invite(event: "synapse.events.EventBase") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
-```
-
-Called when processing an invitation received over federation. Unlike `user_may_invite`,
-this callback receives the entire event, including any stripped state in the `unsigned`
-section, not just the room and user IDs.
-
-The callback must return one of:
-- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
-  decide to reject it.
-- `synapse.module_api.errors.Codes` to reject the operation with an error code. In case
-  of doubt, `synapse.module_api.errors.Codes.FORBIDDEN` is a good error code.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
-The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
-be used. If this happens, Synapse will not call any of the subsequent implementations of
-this callback.
-
-If all of the callbacks return `synapse.module_api.NOT_SPAM`, Synapse will also fall
-through to the `user_may_invite` callback before approving the invite.
-
-
 ### `user_may_send_3pid_invite`

 _First introduced in Synapse v1.45.0_
@@ -189,19 +159,12 @@ _First introduced in Synapse v1.37.0_

 _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._

-_Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that only expect a single `user_id` argument are still supported._
-
 ```python
-async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
+async def user_may_create_room(user_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```

 Called when processing a room creation request.

-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.
-
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
   decide to reject it.
@@ -276,41 +239,6 @@ be used. If this happens, Synapse will not call any of the subsequent implementa
 this callback.


-### `user_may_send_state_event`
-
-_First introduced in Synapse v1.132.0_
-
-```python
-async def user_may_send_state_event(user_id: str, room_id: str, event_type: str, state_key: str, content: JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when processing a request to [send state events](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey) to a room.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) sending the state event.
-* `room_id`: The ID of the room that the requested state event is being sent to.
-* `event_type`: The requested type of event.
-* `state_key`: The requested state key.
-* `content`: The requested event contents.
-
-The callback must return one of:
-- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
-  decide to reject it.
-- `synapse.module_api.errors.Codes` to reject the operation with an error code. In case
-  of doubt, `synapse.module_api.errors.Codes.FORBIDDEN` is a good error code.
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
-The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
-be used. If this happens, Synapse will not call any of the subsequent implementations of
-this callback.
-
-
 ### `check_username_for_spam`
@@ -425,8 +353,6 @@ callback returns `False`, Synapse falls through to the next one. The value of th
 callback that does not return `False` will be used. If this happens, Synapse will not call
 any of the subsequent implementations of this callback.

-Note that this check is applied to federation invites as of Synapse v1.130.0.
-
 ### `check_login_for_spam`
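A sketch of a module implementing the richer `user_may_create_room` signature shown on the removed side of this diff, registered via the documented `register_spam_checker_callbacks` method; the class name and rejection rule are illustrative assumptions.

```python
# Example spam-checker module using the two-argument user_may_create_room
# callback documented above. The policy itself is arbitrary.
from typing import Union

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class RoomCreationPolicy:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room,
        )

    async def user_may_create_room(
        self, user_id: str, room_config: dict
    ) -> Union["NOT_SPAM", Codes]:
        # Illustrative rule: guests may not create public rooms.
        if room_config.get("preset") == "public_chat" and user_id.startswith("@guest_"):
            return Codes.FORBIDDEN
        return NOT_SPAM
```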
@@ -23,7 +23,6 @@ such as [Github][github-idp].
 [auth0]: https://auth0.com/
 [authentik]: https://goauthentik.io/
 [lemonldap]: https://lemonldap-ng.org/
-[pocket-id]: https://pocket-id.org/
 [okta]: https://www.okta.com/
 [dex-idp]: https://github.com/dexidp/dex
 [keycloak-idp]: https://www.keycloak.org/docs/latest/server_admin/#sso-protocols
@@ -186,7 +185,6 @@ oidc_providers:
 4. Note the slug of your application, Client ID and Client Secret.

 Note: RSA keys must be used for signing for Authentik, ECC keys do not work.
-Note: The provider must have a signing key set and must not use an encryption key.

 Synapse config:
 ```yaml
@@ -205,12 +203,6 @@ oidc_providers:
       config:
         localpart_template: "{{ user.preferred_username }}"
         display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
-[...]
-jwt_config:
-  enabled: true
-  secret: "your client secret" # TO BE FILLED (same as `client_secret` above)
-  algorithm: "RS256"
-  # (...other fields)
 ```

 ### Dex
@@ -632,32 +624,6 @@ oidc_providers:

 Note that the fields `client_id` and `client_secret` are taken from the CURL response above.

-### Pocket ID
-
-[Pocket ID][pocket-id] is a simple OIDC provider that allows users to authenticate with their passkeys.
-1. Go to `OIDC Clients`
-2. Click on `Add OIDC Client`
-3. Add a name, for example `Synapse`
-4. Add `https://auth.example.org/_synapse/client/oidc/callback` to `Callback URLs`  # Replace `auth.example.org` with your domain
-5. Click on `Save`
-6. Note down your `Client ID` and `Client secret`, these will be used later
-
-Synapse config:
-
-```yaml
-oidc_providers:
-  - idp_id: pocket_id
-    idp_name: Pocket ID
-    issuer: "https://auth.example.org/" # Replace with your domain
-    client_id: "your-client-id" # Replace with the "Client ID" you noted down before
-    client_secret: "your-client-secret" # Replace with the "Client secret" you noted down before
-    scopes: ["openid", "profile"]
-    user_mapping_provider:
-      config:
-        localpart_template: "{{ user.preferred_username }}"
-        display_name_template: "{{ user.name }}"
-```
-
 ### Shibboleth with OIDC Plugin

 [Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities.
@@ -100,14 +100,6 @@ database:
     keepalives_count: 3
 ```

-## Postgresql major version upgrades
-
-Postgres uses separate directories for database locations between major versions (typically `/var/lib/postgresql/<version>/main`).
-
-Therefore, it is recommended to stop Synapse and other services (MAS, etc) before upgrading Postgres major versions.
-
-It is also strongly recommended to [back up](./usage/administration/backups.md#database) your database beforehand to ensure no data loss arising from a failed upgrade.
-
 ## Backups

 Don't forget to [back up](./usage/administration/backups.md#database) your database!
@@ -5,10 +5,10 @@ It is recommended to put a reverse proxy such as
 [Apache](https://httpd.apache.org/docs/current/mod/mod_proxy_http.html),
 [Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy),
 [HAProxy](https://www.haproxy.org/) or
-[relayd](https://man.openbsd.org/relayd.8) in front of Synapse.
-This has the advantage of being able to expose the default HTTPS port (443) to Matrix
-clients without requiring Synapse to bind to a privileged port (port numbers less than
-1024), avoiding the need for `CAP_NET_BIND_SERVICE` or running as root.
+[relayd](https://man.openbsd.org/relayd.8) in front of Synapse. One advantage
+of doing so is that it means that you can expose the default https port
+(443) to Matrix clients without needing to run Synapse with root
+privileges.

 You should configure your reverse proxy to forward requests to `/_matrix` or
 `/_synapse/client` to Synapse, and have it set the `X-Forwarded-For` and
@@ -7,23 +7,8 @@ proxy is supported, not SOCKS proxy or anything else.

 ## Configure

-The proxy settings can be configured in the homeserver configuration file via
-[`http_proxy`](../usage/configuration/config_documentation.md#http_proxy),
-[`https_proxy`](../usage/configuration/config_documentation.md#https_proxy), and
-[`no_proxy_hosts`](../usage/configuration/config_documentation.md#no_proxy_hosts).
-
-`homeserver.yaml` example:
-
-```yaml
-http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
-https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
-no_proxy_hosts:
-  - master.hostname.example.com
-  - 10.1.0.0/16
-  - 172.30.0.0/16
-```
-
-The proxy settings can also be configured via the `http_proxy`, `https_proxy`,
-`no_proxy` environment variables. The environment variable is not case sensitive.
+The `http_proxy`, `https_proxy`, `no_proxy` environment variables are used to
+specify proxy settings. The environment variable is not case sensitive.
 - `http_proxy`: Proxy server to use for HTTP requests.
 - `https_proxy`: Proxy server to use for HTTPS requests.
 - `no_proxy`: Comma-separated list of hosts, IP addresses, or IP ranges in CIDR
@@ -59,7 +44,7 @@ The proxy will be **used** for:
 - phone-home stats
 - recaptcha validation
 - CAS auth validation
-- OpenID Connect (OIDC)
+- OpenID Connect
 - Outbound federation
 - Federation (checking public key revocation)
 - Fetching public keys of other servers
@@ -68,7 +53,7 @@ The proxy will be **used** for:
 It will **not be used** for:

 - Application Services
-- Matrix Identity servers
+- Identity servers
 - In worker configurations
   - connections between workers
   - connections from workers to Redis
@@ -286,7 +286,7 @@ Installing prerequisites on Ubuntu or Debian:
 ```sh
 sudo apt install build-essential python3-dev libffi-dev \
                      python3-pip python3-setuptools sqlite3 \
-                     libssl-dev virtualenv libjpeg-dev libxslt1-dev
+                     libssl-dev virtualenv libjpeg-dev libxslt1-dev libicu-dev
 ```

 ##### ArchLinux
@@ -295,7 +295,7 @@ Installing prerequisites on ArchLinux:

 ```sh
 sudo pacman -S base-devel python python-pip \
-               python-setuptools python-virtualenv sqlite3
+               python-setuptools python-virtualenv sqlite3 icu
 ```

 ##### CentOS/Fedora
@@ -305,22 +305,34 @@ Installing prerequisites on CentOS or Fedora Linux:
 ```sh
 sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                  libwebp-devel libxml2-devel libxslt-devel libpq-devel \
-                 python3-virtualenv libffi-devel openssl-devel python3-devel
+                 python3-virtualenv libffi-devel openssl-devel python3-devel \
+                 libicu-devel
 sudo dnf group install "Development Tools"
 ```

-##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux
+##### Red Hat Enterprise Linux / Rocky Linux

-*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
+*Note: The term "RHEL" below refers to both Red Hat Enterprise Linux and Rocky Linux. The distributions are 1:1 binary compatible.*

 It's recommended to use the latest Python versions.

-RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
+RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ship with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.

 Python 3.11 and 3.12 are available for both RHEL 8 and 9.

 These commands should be run as root user.

+RHEL 8
+```bash
+# Enable PowerTools repository
+dnf config-manager --set-enabled powertools
+```
+RHEL 9
+```bash
+# Enable CodeReady Linux Builder repository
+crb enable
+```
+
 Install new version of Python. You only need one of these:
 ```bash
 # Python 3.11
@@ -332,7 +344,7 @@ dnf install python3.12 python3.12-devel
 ```
 Finally, install common prerequisites
 ```bash
-dnf install libpq5 libpq5-devel lz4 pkgconf
+dnf install libicu libicu-devel libpq5 libpq5-devel lz4 pkgconf
 dnf group install "Development Tools"
 ```
 ###### Using venv module instead of virtualenv command
@@ -364,6 +376,20 @@ xcode-select --install

 Some extra dependencies may be needed. You can use Homebrew (https://brew.sh) for them.

+You may need to install icu, and make the icu binaries and libraries accessible.
+Please follow [the official instructions of PyICU](https://pypi.org/project/PyICU/) to do so.
+
+If you're struggling to get icu discovered, and see:
+```
+RuntimeError:
+Please install pkg-config on your system or set the ICU_VERSION environment
+variable to the version of ICU you have installed.
+```
+despite it being installed and having your `PATH` updated, you can omit this dependency by
+not specifying `--extras all` to `poetry`. If using postgres, you can install Synapse via
+`poetry install --extras saml2 --extras oidc --extras postgres --extras opentracing --extras redis --extras sentry`.
+ICU is not a hard dependency on getting a working installation.
+
 On ARM-based Macs you may also need to install libjpeg and libpq:
 ```sh
 brew install jpeg libpq
@@ -385,7 +411,8 @@ Installing prerequisites on openSUSE:
 ```sh
 sudo zypper in -t pattern devel_basis
 sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
-               python-devel libffi-devel libopenssl-devel libjpeg62-devel
+               python-devel libffi-devel libopenssl-devel libjpeg62-devel \
+               libicu-devel
 ```

 ##### OpenBSD
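After installing the system packages above, a quick check (assuming the PyICU package, whose import name is `icu`) that ICU is discoverable:

```python
# Verify PyICU can load the system ICU libraries and collate correctly.
import icu  # PyICU's import name

print("ICU version:", icu.ICU_VERSION)
collator = icu.Collator.createInstance(icu.Locale("en"))
print(sorted(["éclair", "eclair", "zebra"], key=collator.getSortKey))
```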
@@ -88,8 +88,7 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=172.16.0.0-172.31.255.255

 # recommended additional local peers to block, to mitigate external access to internal services.
-# https://www.enablesecurity.com/blog/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
-# https://www.enablesecurity.com/blog/cve-2020-26262-bypass-of-coturns-access-control-protection/#further-concerns-what-else
+# https://www.rtcsec.com/article/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
 no-multicast-peers
 denied-peer-ip=0.0.0.0-0.255.255.255
 denied-peer-ip=100.64.0.0-100.127.255.255
@@ -102,14 +101,6 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=198.51.100.0-198.51.100.255
 denied-peer-ip=203.0.113.0-203.0.113.255
 denied-peer-ip=240.0.0.0-255.255.255.255
-denied-peer-ip=::1
-denied-peer-ip=64:ff9b::-64:ff9b::ffff:ffff
-denied-peer-ip=::ffff:0.0.0.0-::ffff:255.255.255.255
-denied-peer-ip=100::-100::ffff:ffff:ffff:ffff
-denied-peer-ip=2001::-2001:1ff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=2002::-2002:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fc00::-fdff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fe80::-febf:ffff:ffff:ffff:ffff:ffff:ffff:ffff

 # special case the turn server itself so that client->TURN->TURN->client flows work
 # this should be one of the turn server's listening IPs
@@ -63,7 +63,7 @@ class ExampleSpamChecker:
     async def user_may_invite(self, inviter_userid, invitee_userid, room_id):
         return True  # allow all invites

-    async def user_may_create_room(self, userid, room_config):
+    async def user_may_create_room(self, userid):
         return True  # allow all room creations

     async def user_may_create_room_alias(self, userid, room_alias):
@@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
 to turn that into a displayname when creating a Matrix user for this individual.
 It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
 variations. As each Synapse configuration may want something different, this is
-where SSO mapping providers come into play.
+where SAML mapping providers come into play.

 SSO mapping providers are currently supported for OpenID and SAML SSO
 configurations. Please see the details below for how to implement your own.
@@ -35,7 +35,7 @@ handlers:
 loggers:
     synapse:
         level: INFO
-        handlers: [file]
+        handlers: [remote]
     synapse.storage.SQL:
         level: WARNING
 ```
157
docs/upgrade.md
@@ -117,163 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
 stacking them up. You can monitor the currently running background updates with
 [the Admin API](usage/administration/admin_api/background_updates.html#status).

-# Upgrading to v1.139.0
-
-## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin
-
-Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
-2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
-24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.
-
-## `/register` requests from old application service implementations may break when using MAS
-
-Application Services that do not set `inhibit_login=true` when calling `POST
-/_matrix/client/v3/register` will receive the error
-`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. This is a
-result of [MSC4190: Device management for application
-services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190), which
-adds new endpoints for application services to create encryption-ready devices
-with, replacing calls to `/login` or to `/register` without `inhibit_login=true`.
-
-If an application service you use starts to fail with the mentioned error,
-ensure it is up to date. If it is, then kindly let the author know that they
-need to update their implementation to call `/register` with
-`inhibit_login=true`.
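For illustration, an MSC4190-compatible registration call from an application service looks roughly like this (a sketch using the `requests` library; the homeserver URL, token and username are placeholders):

```python
import requests

HOMESERVER = "https://example.com"           # placeholder
AS_TOKEN = "<application service as_token>"  # placeholder

resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/register",
    headers={"Authorization": f"Bearer {AS_TOKEN}"},
    json={
        "type": "m.login.application_service",
        "username": "my_bridge_user",
        # The key change: ask the server not to create a device or an
        # access token as part of registration.
        "inhibit_login": True,
    },
)
resp.raise_for_status()
print(resp.json()["user_id"])
```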
-
-# Upgrading to v1.136.0
-
-## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
-
-The `run_as_background_process` function is now a method of the `ModuleApi` class. If
-you were using the function directly from the module API, it will continue to work fine,
-but the background process metrics will not include an accurate `server_name` label.
-This kind of metric labeling isn't relevant for many use cases and is used to
-differentiate Synapse instances running in the same Python process (relevant to Synapse
-Pro: Small Hosts). We recommend updating your usage to the new
-`ModuleApi.run_as_background_process` method to stay on top of future changes.
-
-<details>
-<summary>Example <code>run_as_background_process</code> upgrade</summary>
-
-Before:
-```python
-class MyModule:
-    def __init__(self, module_api: ModuleApi) -> None:
-        run_as_background_process(__name__ + ":setup_database", self.setup_database)
-```
-
-After:
-```python
-class MyModule:
-    def __init__(self, module_api: ModuleApi) -> None:
-        module_api.run_as_background_process(__name__ + ":setup_database", self.setup_database)
-```
-
-</details>
-
-## Metric labels have changed on `synapse_federation_last_received_pdu_time` and `synapse_federation_last_sent_pdu_time`
-
-Previously, the `synapse_federation_last_received_pdu_time` and
-`synapse_federation_last_sent_pdu_time` metrics both used the `server_name` label to
-differentiate between different servers that we send and receive events from.
-
-Since we're now using the `server_name` label to differentiate between different Synapse
-homeserver instances running in the same process, these metrics have been changed as follows:
-
-- `synapse_federation_last_received_pdu_time` now uses the `origin_server_name` label
-- `synapse_federation_last_sent_pdu_time` now uses the `destination_server_name` label
-
-The Grafana dashboard JSON in `contrib/grafana/synapse.json` has been updated to reflect
-this change but you will need to manually update your own existing Grafana dashboards
-using these metrics.
-
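In `prometheus_client` terms, the relabelling amounts to the following (a conceptual sketch, not Synapse's actual metric definitions):

```python
from prometheus_client import Gauge

# `server_name` now identifies the local homeserver instance, while the
# new label identifies the remote server the PDU came from.
last_received_pdu = Gauge(
    "synapse_federation_last_received_pdu_time",
    "Time we last received a PDU from each remote server",
    labelnames=["server_name", "origin_server_name"],
)

last_received_pdu.labels(
    server_name="example.com",
    origin_server_name="matrix.org",
).set_to_current_time()
```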
-## Stable integration with Matrix Authentication Service
-
-Support for [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service) is now stable, with a simplified configuration.
-This stable integration requires MAS 0.20.0 or later.
-
-The existing `experimental_features.msc3861` configuration option is now deprecated and will be removed in Synapse v1.137.0.
-
-Synapse deployments already using MAS should now use the new configuration options:
-
-```yaml
-matrix_authentication_service:
-  # Enable the MAS integration
-  enabled: true
-  # The base URL where Synapse will contact MAS
-  endpoint: http://localhost:8080
-  # The shared secret used to authenticate MAS requests, must be the same as `matrix.secret` in the MAS configuration
-  # See https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#matrix
-  secret: "asecurerandomsecretstring"
-```
-
-They must remove the `experimental_features.msc3861` configuration option from their configuration.
-
-They can also remove the client previously used by Synapse [in the MAS configuration](https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#clients) as it is no longer in use.
-
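After switching to the stable configuration, a quick way to confirm that the configured `endpoint` actually reaches MAS (a sketch; treat the `/health` path as an assumption about MAS's health-check endpoint):

```python
import requests

# Must match `matrix_authentication_service.endpoint` in homeserver.yaml.
MAS_ENDPOINT = "http://localhost:8080"

resp = requests.get(f"{MAS_ENDPOINT}/health", timeout=5)
resp.raise_for_status()
print("MAS reachable:", resp.text.strip())
```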
-# Upgrading to v1.135.0
-
-## `on_user_registration` module API callback may now run on any worker
-
-Previously, the `on_user_registration` callback would only run on the main
-process. Modules relying on this callback must assume that they may now be
-called from any worker, not just the main process.
-
-# Upgrading to v1.134.0
-
-## ICU bundled with Synapse
-
-Synapse now uses the Rust `icu` library for improved user search. Installing the
-native ICU library on your system is no longer required.
-
-# Upgrading to v1.130.0
-
-## Documented endpoint which can be delegated to a federation worker
-
-The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation
-worker. This is not new behaviour, but had not been documented yet. The
-[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has
-been updated to include it. Make sure to check your reverse proxy rules if you
-are using workers.
-
-# Upgrading to v1.126.0
-
-## Room list publication rules change
-
-The default [`room_list_publication_rules`] setting was changed to disallow
-anyone (except server admins) from publishing to the room list by default.
-
-This is in line with Synapse's policy of locking down, by default, features
-that can be abused without moderation.
-
-To keep the previous behavior of allowing publication by default, add the
-following to the config:
-
-```yaml
-room_list_publication_rules:
-  - "action": "allow"
-```
-
-[`room_list_publication_rules`]: usage/configuration/config_documentation.md#room_list_publication_rules
-
-## Change of signing key expiry date for the Debian/Ubuntu package repository
-
-Administrators using the Debian/Ubuntu packages from `packages.matrix.org`
-should be aware that we have recently updated the expiry date on the
-repository's GPG signing key, and that this change must be imported into your
-keyring.
-
-If you have the `matrix-org-archive-keyring` package installed and it updates before the current key expires, this should
-happen automatically.
-
-Otherwise, if you see an error similar to `The following signatures were invalid: EXPKEYSIG F473DD4473365DE1`, you
-will need to get a fresh copy of the keys. You can do so with:
-
-```sh
-sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
-```
-
-The old version of the key will expire on `2025-03-15`.
-
 # Upgrading to v1.122.0

 ## Dropping support for PostgreSQL 11 and 12
@@ -160,7 +160,7 @@ Using the following curl command:

 ```console
 curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
 ```

-`<access-token>` - can be obtained in element by looking in All settings, clicking Help & About and down the bottom is:
+`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
 Access Token:\<click to reveal\>

 `<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org
@@ -255,7 +255,7 @@ line to `/etc/default/matrix-synapse`:

     LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2

-*Note*: You may need to set `PYTHONMALLOC=malloc` to ensure that `jemalloc` can accurately calculate memory usage. By default, Python uses its internal small-object allocator, which may interfere with jemalloc's ability to track memory consumption correctly. This could prevent the [cache_autotuning](../configuration/config_documentation.md#caches) feature from functioning as expected, as the Python allocator may not reach the memory threshold set by `max_cache_memory_usage`, thus not triggering the cache eviction process.
+*Note*: You may need to set `PYTHONMALLOC=malloc` to ensure that `jemalloc` can accurately calculate memory usage. By default, Python uses its internal small-object allocator, which may interfere with jemalloc's ability to track memory consumption correctly. This could prevent the [cache_autotuning](../configuration/config_documentation.md#caches-and-associated-values) feature from functioning as expected, as the Python allocator may not reach the memory threshold set by `max_cache_memory_usage`, thus not triggering the cache eviction process.

 This made a significant difference on Python 2.7 - it's unclear how
 much of an improvement it provides on Python 3.x.
@@ -30,7 +30,7 @@ The following statistics are sent to the configured reporting endpoint:
 | `python_version` | string | The Python version number in use (e.g "3.7.1"). Taken from `sys.version_info`. |
 | `total_users` | int | The number of registered users on the homeserver. |
 | `total_nonbridged_users` | int | The number of users, excluding those created by an Application Service. |
-| `daily_user_type_native` | int | The number of native, non-guest users created in the last 24 hours. |
+| `daily_user_type_native` | int | The number of native users created in the last 24 hours. |
 | `daily_user_type_guest` | int | The number of guest users created in the last 24 hours. |
 | `daily_user_type_bridged` | int | The number of users created by Application Services in the last 24 hours. |
 | `total_room_count` | int | The total number of rooms present on the homeserver. |
[diff of one file suppressed because it is too large]
@@ -77,11 +77,14 @@ The user provided search term is lowercased and normalized using [NFKC](https://
 this treats the string as case-insensitive, canonicalizes different forms of the
 same text, and maps some "roughly equivalent" characters together.

-The search term is then split into segments using the [`icu_segmenter`
-Rust crate](https://crates.io/crates/icu_segmenter). This crate ships with its
-own dictionary and Long Short-Term Memory (LSTM) machine learning models
-per-language to segment words. Read more [in the crate's
-documentation](https://docs.rs/icu/latest/icu/segmenter/struct.WordSegmenter.html#method.new_auto).
+The search term is then split into words:
+
+* If [ICU](https://en.wikipedia.org/wiki/International_Components_for_Unicode) is
+  available, then the system's [default locale](https://unicode-org.github.io/icu/userguide/locale/#default-locales)
+  will be used to break the search term into words. (See the
+  [installation instructions](setup/installation.md) for how to install ICU.)
+* If unavailable, then runs of ASCII characters, numbers, underscores, and hyphens
+  are considered words.

 The queries for PostgreSQL and SQLite are detailed below, but their overall goal
 is to find matching users, preferring users who are "real" (e.g. not bots,
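A rough Python rendering of the normalisation and fallback word-splitting described above (illustrative only; the real code paths live in Rust and in PyICU respectively):

```python
import re
import unicodedata


def parse_words(search_term: str) -> list[str]:
    # Lowercase, then NFKC-normalize: case-insensitive, canonical forms,
    # and "roughly equivalent" characters mapped together.
    term = unicodedata.normalize("NFKC", search_term.lower())
    # Fallback segmentation when ICU is unavailable: runs of ASCII
    # letters, digits, underscores and hyphens count as words.
    return re.findall(r"[a-z0-9_\-]+", term)


print(parse_words("Ａlice Ｓmith"))  # fullwidth input -> ['alice', 'smith']
```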
@@ -200,7 +200,6 @@ information.
     ^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$

     # Federation requests
-    ^/_matrix/federation/v1/version$
     ^/_matrix/federation/v1/event/
     ^/_matrix/federation/v1/state/
     ^/_matrix/federation/v1/state_ids/
@@ -238,9 +237,7 @@ information.
     ^/_matrix/client/unstable/im.nheko.summary/summary/.*$
     ^/_matrix/client/(r0|v3|unstable)/account/3pid$
     ^/_matrix/client/(r0|v3|unstable)/account/whoami$
-    ^/_matrix/client/(r0|v3|unstable)/account/deactivate$
-    ^/_matrix/client/(r0|v3)/delete_devices$
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
+    ^/_matrix/client/(r0|v3|unstable)/devices$
     ^/_matrix/client/versions$
     ^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$
     ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/
@@ -252,16 +249,13 @@ information.
     ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
     ^/_matrix/client/(r0|v3|unstable)/capabilities$
     ^/_matrix/client/(r0|v3|unstable)/notifications$
-    ^/_synapse/admin/v1/rooms/[^/]+$

     # Encryption requests
     ^/_matrix/client/(r0|v3|unstable)/keys/query$
     ^/_matrix/client/(r0|v3|unstable)/keys/changes$
     ^/_matrix/client/(r0|v3|unstable)/keys/claim$
     ^/_matrix/client/(r0|v3|unstable)/room_keys/
-    ^/_matrix/client/(r0|v3|unstable)/keys/upload
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$
+    ^/_matrix/client/(r0|v3|unstable)/keys/upload/

     # Registration/login requests
     ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -326,14 +320,6 @@ For multiple workers not handling the SSO endpoints properly, see
 [#7530](https://github.com/matrix-org/synapse/issues/7530) and
 [#9427](https://github.com/matrix-org/synapse/issues/9427).

-Additionally, when MSC3861 is enabled (`experimental_features.msc3861.enabled`
-set to `true`), the following endpoints can be handled by the worker:
-
-    ^/_synapse/admin/v2/users/[^/]+$
-    ^/_synapse/admin/v1/username_available$
-    ^/_synapse/admin/v1/users/[^/]+/_allow_cross_signing_replacement_without_uia$
-    ^/_synapse/admin/v1/users/[^/]+/devices$
-
 Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
 with `client` and `federation` `resources` must be configured in the
 [`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners)
@@ -532,9 +518,8 @@ the stream writer for the `account_data` stream:

 ##### The `receipts` stream

-The `receipts` stream supports multiple writers. The following endpoints
-can be handled by any worker, but should be routed directly to one of the workers
-configured as stream writer for the `receipts` stream:
+The following endpoints should be routed directly to the worker configured as
+the stream writer for the `receipts` stream:

     ^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
     ^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers
@@ -553,18 +538,6 @@ the stream writer for the `push_rules` stream:

     ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/

-##### The `device_lists` stream
-
-The `device_lists` stream supports multiple writers. The following endpoints
-can be handled by any worker, but should be routed directly to one of the workers
-configured as stream writer for the `device_lists` stream:
-
-    ^/_matrix/client/(r0|v3)/delete_devices$
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
-    ^/_matrix/client/(r0|v3|unstable)/keys/upload
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$
-
 #### Restrict outbound federation traffic to a specific set of workers

 The
@@ -96,6 +96,7 @@
       gnumake

       # Native dependencies for running Synapse.
+      icu
       libffi
       libjpeg
       libpqxx
mypy.ini (16 changed lines)

@@ -1,17 +1,6 @@
 [mypy]
 namespace_packages = True
-# Our custom mypy plugin should remain first in this list.
-#
-# mypy has a limitation where it only chooses the first plugin that returns a non-None
-# value for each hook (known-limitation, c.f.
-# https://github.com/python/mypy/issues/19524). We workaround this by putting our custom
-# plugin first in the plugin order and then manually calling any other conflicting
-# plugin hooks in our own plugin followed by our own checks.
-#
-# If you add a new plugin, make sure to check whether the hooks being used conflict with
-# our custom plugin hooks and if so, manually call the other plugin's hooks in our
-# custom plugin. (also applies to if the plugin is updated in the future)
-plugins = scripts-dev/mypy_synapse_plugin.py, pydantic.mypy, mypy_zope:plugin
+plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
 follow_imports = normal
 show_error_codes = True
 show_traceback = True
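The workaround those removed comments describe looks roughly like this (a sketch with hypothetical names; the real logic lives in `scripts-dev/mypy_synapse_plugin.py`):

```python
from typing import Callable, Optional, Type

from mypy.plugin import FunctionContext, Plugin
from mypy.types import Type as MypyType
from pydantic.mypy import PydanticPlugin  # a plugin whose hooks we conflict with


class SynapseLikePlugin(Plugin):
    def get_function_hook(
        self, fullname: str
    ) -> Optional[Callable[[FunctionContext], MypyType]]:
        # mypy only consults the first plugin that returns a non-None hook,
        # so delegate to the other plugin's hook before running our own checks.
        other = PydanticPlugin(self.options).get_function_hook(fullname)

        def hook(ctx: FunctionContext) -> MypyType:
            result = other(ctx) if other is not None else ctx.default_return_type
            # ... custom checks would run here ...
            return result

        return hook


def plugin(version: str) -> Type[Plugin]:
    return SynapseLikePlugin
```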
@@ -110,6 +99,3 @@ ignore_missing_imports = True

 [mypy-multipart.*]
 ignore_missing_imports = True
-
-[mypy-mypy_zope.*]
-ignore_missing_imports = True
poetry.lock (generated, 2234 changed lines)
[diff suppressed because it is too large]
@@ -74,10 +74,6 @@ select = [
     "PIE",
     # flake8-executable
     "EXE",
-    # flake8-logging
-    "LOG",
-    # flake8-logging-format
-    "G",
 ]

 [tool.ruff.lint.isort]
@@ -101,7 +97,7 @@ module-name = "synapse.synapse_rust"

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.139.0"
+version = "1.122.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "AGPL-3.0-or-later"
@@ -178,13 +174,8 @@ signedjson = "^1.1.0"
 service-identity = ">=18.1.0"
 # Twisted 18.9 introduces some logger improvements that the structured
 # logger utilises
-# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
-# Twisted 21.2.0 introduces contextvar support.
-# We could likely bump this to 22.1 without making distro packagers'
-# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
-# RHEL 9: 22.10)
-Twisted = {extras = ["tls"], version = ">=21.2.0"}
-treq = ">=21.5.0"
+Twisted = {extras = ["tls"], version = ">=18.9.0"}
+treq = ">=15.1"
 # Twisted has required pyopenssl 16.0 since about Twisted 16.6.
 pyOpenSSL = ">=16.0.0"
 PyYAML = ">=5.3"
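The removed `>=21.2.0` lower bound exists because that Twisted release added `contextvars` support to Deferreds, roughly as sketched below (illustrative; the exact propagation rules are Twisted's):

```python
import contextvars

from twisted.internet import defer

request_id = contextvars.ContextVar("request_id")


@defer.inlineCallbacks
def handler():
    request_id.set("abc123")
    yield defer.succeed(None)
    # With Twisted >= 21.2.0, the contextvar survives across the yield.
    print(request_id.get())
```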
@@ -200,9 +191,7 @@ pymacaroons = ">=0.13.0"
 msgpack = ">=0.5.2"
 phonenumbers = ">=8.2.0"
 # we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
-# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
-# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
-prometheus-client = ">=0.6.0"
+prometheus-client = ">=0.4.0"
 # we use `order`, which arrived in attrs 19.2.0.
 # Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
 attrs = ">=19.2.0,!=21.1.0"
@@ -231,7 +220,7 @@ pydantic = ">=1.7.4, <3"
 # https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
 # `poetry build` do the right thing without this explicit dependency.
 #
-# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
+# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
 # but the alternative is to add it to the main list of deps where it isn't
 # needed.
 setuptools_rust = ">=1.3"
@@ -261,6 +250,7 @@ hiredis = { version = "*", optional = true }
 Pympler = { version = "*", optional = true }
 parameterized = { version = ">=0.7.4", optional = true }
 idna = { version = ">=2.5", optional = true }
+pyicu = { version = ">=2.10.2", optional = true }

 [tool.poetry.extras]
 # NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
@@ -283,6 +273,10 @@ redis = ["txredisapi", "hiredis"]
 # Required to use experimental `caches.track_memory_usage` config option.
 cache-memory = ["pympler"]
 test = ["parameterized", "idna"]
+# Allows for better search for international characters in the user directory. This
+# requires libicu's development headers installed on the system (e.g. libicu-dev on
+# Debian-based distributions).
+user-search = ["pyicu"]

 # The duplication here is awful. I hate hate hate hate hate it. However, for now I want
 # to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
@@ -314,6 +308,8 @@ all = [
     "txredisapi", "hiredis",
     # cache-memory
     "pympler",
+    # improved user search
+    "pyicu",
     # omitted:
     #   - test: it's useful to have this separate from dev deps in the olddeps job
     #   - systemd: this is a system-based requirement
@@ -324,7 +320,7 @@ all = [
 # failing on new releases. Keeping lower bounds loose here means that dependabot
 # can bump versions without having to update the content-hash in the lockfile.
 # This helps prevent merge conflicts when running a batch of dependabot updates.
-ruff = "0.12.10"
+ruff = "0.7.3"
 # Type checking only works with the pydantic.v1 compat module from pydantic v2
 pydantic = "^2"
@@ -333,6 +329,7 @@ lxml-stubs = ">=0.4.0"
 mypy = "*"
 mypy-zope = "*"
 types-bleach = ">=4.1.0"
+types-commonmark = ">=0.9.2"
 types-jsonschema = ">=3.2.0"
 types-netaddr = ">=0.8.0.6"
 types-opentracing = ">=2.4.2"
@@ -355,7 +352,7 @@ idna = ">=2.5"
 click = ">=8.1.3"
 # GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
 GitPython = ">=3.1.20"
-markdown-it-py = ">=3.0.0"
+commonmark = ">=0.9.1"
 pygithub = ">=1.55"
 # The following are executed as commands by the release script.
 twine = "*"
@@ -373,7 +370,7 @@ tomli = ">=1.2.3"
 # runtime errors caused by build system changes.
 # We are happy to raise these upper bounds upon request,
 # provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
+requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
 build-backend = "poetry.core.masonry.api"

@@ -381,19 +378,19 @@ build-backend = "poetry.core.masonry.api"
 # Skip unsupported platforms (by us or by Rust).
 # See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
 # We skip:
-# - CPython and PyPy 3.8: EOLed
+# - CPython 3.6, 3.7 and 3.8: EOLed
+# - PyPy 3.7 and 3.8: we only support Python 3.9+
 # - musllinux i686: excluded to reduce number of wheels we build.
 #   c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
-skip = "cp38* pp38* *-musllinux_i686"
-# Enable non-default builds.
-# "pypy" used to be included by default up until cibuildwheel 3.
-enable = "pypy"
+# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
+#   c.f. https://github.com/matrix-org/synapse/pull/14259
+skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"

 # We need a rust compiler.
 #
 # We temporarily pin Rust to 1.82.0 to work around
 # https://github.com/element-hq/synapse/issues/17988
-before-all = "sh .ci/before_build_wheel.sh"
+before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal"
 environment= { PATH = "$PATH:$HOME/.cargo/bin" }

 # For some reason if we don't manually clean the build directory we
@@ -7,7 +7,7 @@ name = "synapse"
 version = "0.1.0"

 edition = "2021"
-rust-version = "1.82.0"
+rust-version = "1.66.0"

 [lib]
 name = "synapse"
@@ -23,36 +23,26 @@ name = "synapse.synapse_rust"

 [dependencies]
 anyhow = "1.0.63"
-base64 = "0.22.1"
+base64 = "0.21.7"
 bytes = "1.6.0"
 headers = "0.4.0"
 http = "1.1.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 mime = "0.3.17"
-pyo3 = { version = "0.25.1", features = [
+pyo3 = { version = "0.23.2", features = [
     "macros",
     "anyhow",
     "abi3",
-    "abi3-py39",
+    "abi3-py38",
 ] }
-pyo3-log = "0.12.4"
-pythonize = "0.25.0"
+pyo3-log = "0.12.0"
+pythonize = "0.23.0"
 regex = "1.6.0"
 sha2 = "0.10.8"
 serde = { version = "1.0.144", features = ["derive"] }
 serde_json = "1.0.85"
 ulid = "1.1.2"
-icu_segmenter = "2.0.0"
-reqwest = { version = "0.12.15", default-features = false, features = [
-    "http2",
-    "stream",
-    "rustls-tls-native-roots",
-] }
-http-body-util = "0.1.3"
-futures = "0.3.31"
-tokio = { version = "1.44.2", features = ["rt", "rt-multi-thread"] }
-once_cell = "1.18.0"

 [features]
 extension-module = ["pyo3/extension-module"]
@@ -61,7 +61,6 @@ fn bench_match_exact(b: &mut Bencher) {
         vec![],
         false,
         false,
-        false,
     )
     .unwrap();

@@ -72,10 +71,10 @@ fn bench_match_exact(b: &mut Bencher) {
         },
     ));

-    let matched = eval.match_condition(&condition, None, None, None).unwrap();
+    let matched = eval.match_condition(&condition, None, None).unwrap();
     assert!(matched, "Didn't match");

-    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
+    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
 }

 #[bench]
@@ -108,7 +107,6 @@ fn bench_match_word(b: &mut Bencher) {
         vec![],
         false,
         false,
-        false,
     )
     .unwrap();

@@ -119,10 +117,10 @@ fn bench_match_word(b: &mut Bencher) {
         },
     ));

-    let matched = eval.match_condition(&condition, None, None, None).unwrap();
+    let matched = eval.match_condition(&condition, None, None).unwrap();
     assert!(matched, "Didn't match");

-    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
+    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
 }

 #[bench]
@@ -155,7 +153,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
         vec![],
         false,
         false,
-        false,
     )
     .unwrap();

@@ -166,10 +163,10 @@ fn bench_match_word_miss(b: &mut Bencher) {
         },
     ));

-    let matched = eval.match_condition(&condition, None, None, None).unwrap();
+    let matched = eval.match_condition(&condition, None, None).unwrap();
     assert!(!matched, "Didn't match");

-    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
+    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
 }

 #[bench]
@@ -202,7 +199,6 @@ fn bench_eval_message(b: &mut Bencher) {
         vec![],
         false,
         false,
-        false,
     )
     .unwrap();

@@ -214,8 +210,7 @@ fn bench_eval_message(b: &mut Bencher) {
         false,
         false,
         false,
-        false,
     );

-    b.iter(|| eval.run(&rules, Some("bob"), Some("person"), None));
+    b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
 }
@@ -58,15 +58,3 @@ impl NotFoundError {
         NotFoundError::new_err(())
     }
 }
-
-import_exception!(synapse.api.errors, HttpResponseException);
-
-impl HttpResponseException {
-    pub fn new(status: StatusCode, bytes: Vec<u8>) -> pyo3::PyErr {
-        HttpResponseException::new_err((
-            status.as_u16(),
-            status.canonical_reason().unwrap_or_default(),
-            bytes,
-        ))
-    }
-}
@@ -54,7 +54,6 @@ enum EventInternalMetadataData {
     RecheckRedaction(bool),
     SoftFailed(bool),
     ProactivelySend(bool),
-    PolicyServerSpammy(bool),
     Redacted(bool),
     TxnId(Box<str>),
     TokenId(i64),
@@ -97,13 +96,6 @@ impl EventInternalMetadataData {
                     .to_owned()
                     .into_any(),
             ),
-            EventInternalMetadataData::PolicyServerSpammy(o) => (
-                pyo3::intern!(py, "policy_server_spammy"),
-                o.into_pyobject(py)
-                    .unwrap_infallible()
-                    .to_owned()
-                    .into_any(),
-            ),
             EventInternalMetadataData::Redacted(o) => (
                 pyo3::intern!(py, "redacted"),
                 o.into_pyobject(py)
@@ -163,11 +155,6 @@ impl EventInternalMetadataData {
                     .extract()
                     .with_context(|| format!("'{key_str}' has invalid type"))?,
             ),
-            "policy_server_spammy" => EventInternalMetadataData::PolicyServerSpammy(
-                value
-                    .extract()
-                    .with_context(|| format!("'{key_str}' has invalid type"))?,
-            ),
             "redacted" => EventInternalMetadataData::Redacted(
                 value
                     .extract()
@@ -440,17 +427,6 @@ impl EventInternalMetadata {
         set_property!(self, ProactivelySend, obj);
     }

-    #[getter]
-    fn get_policy_server_spammy(&self) -> PyResult<bool> {
-        Ok(get_property_opt!(self, PolicyServerSpammy)
-            .copied()
-            .unwrap_or(false))
-    }
-    #[setter]
-    fn set_policy_server_spammy(&mut self, obj: bool) {
-        set_property!(self, PolicyServerSpammy, obj);
-    }
-
     #[getter]
     fn get_redacted(&self) -> PyResult<bool> {
         let bool = get_property!(self, Redacted)?;
@@ -1,303 +0,0 @@
-/*
- * This file is licensed under the Affero General Public License (AGPL) version 3.
- *
- * Copyright (C) 2025 New Vector, Ltd
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as
- * published by the Free Software Foundation, either version 3 of the
- * License, or (at your option) any later version.
- *
- * See the GNU Affero General Public License for more details:
- * <https://www.gnu.org/licenses/agpl-3.0.html>.
- */
-
-use std::{collections::HashMap, future::Future};
-
-use anyhow::Context;
-use futures::TryStreamExt;
-use once_cell::sync::OnceCell;
-use pyo3::{create_exception, exceptions::PyException, prelude::*};
-use reqwest::RequestBuilder;
-use tokio::runtime::Runtime;
-
-use crate::errors::HttpResponseException;
-
-create_exception!(
-    synapse.synapse_rust.http_client,
-    RustPanicError,
-    PyException,
-    "A panic which happened in a Rust future"
-);
-
-impl RustPanicError {
-    fn from_panic(panic_err: &(dyn std::any::Any + Send + 'static)) -> PyErr {
-        // Apparently this is how you extract the panic message from a panic
-        let panic_message = if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
-            str_slice
-        } else if let Some(string) = panic_err.downcast_ref::<String>() {
-            string
-        } else {
-            "unknown error"
-        };
-        Self::new_err(panic_message.to_owned())
-    }
-}
-
-/// This is the name of the attribute where we store the runtime on the reactor
-static TOKIO_RUNTIME_ATTR: &str = "__synapse_rust_tokio_runtime";
-
-/// A Python wrapper around a Tokio runtime.
-///
-/// This allows us to 'store' the runtime on the reactor instance, starting it
-/// when the reactor starts, and stopping it when the reactor shuts down.
-#[pyclass]
-struct PyTokioRuntime {
-    runtime: Option<Runtime>,
-}
-
-#[pymethods]
-impl PyTokioRuntime {
-    fn start(&mut self) -> PyResult<()> {
-        // TODO: allow customization of the runtime like the number of threads
-        let runtime = tokio::runtime::Builder::new_multi_thread()
-            .worker_threads(4)
-            .enable_all()
-            .build()?;
-
-        self.runtime = Some(runtime);
-
-        Ok(())
-    }
-
-    fn shutdown(&mut self) -> PyResult<()> {
-        let runtime = self
-            .runtime
-            .take()
-            .context("Runtime was already shutdown")?;
-
-        // Dropping the runtime will shut it down
-        drop(runtime);
-
-        Ok(())
-    }
-}
-
-impl PyTokioRuntime {
-    /// Get the handle to the Tokio runtime, if it is running.
-    fn handle(&self) -> PyResult<&tokio::runtime::Handle> {
-        let handle = self
-            .runtime
-            .as_ref()
-            .context("Tokio runtime is not running")?
-            .handle();
-
-        Ok(handle)
-    }
-}
-
-/// Get a handle to the Tokio runtime stored on the reactor instance, or create
-/// a new one.
-fn runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
-    if !reactor.hasattr(TOKIO_RUNTIME_ATTR)? {
-        install_runtime(reactor)?;
-    }
-
-    get_runtime(reactor)
-}
-
-/// Install a new Tokio runtime on the reactor instance.
-fn install_runtime(reactor: &Bound<PyAny>) -> PyResult<()> {
-    let py = reactor.py();
-    let runtime = PyTokioRuntime { runtime: None };
-    let runtime = runtime.into_pyobject(py)?;
-
-    // Attach the runtime to the reactor, starting it when the reactor is
-    // running, stopping it when the reactor is shutting down
-    reactor.call_method1("callWhenRunning", (runtime.getattr("start")?,))?;
-    reactor.call_method1(
-        "addSystemEventTrigger",
-        ("after", "shutdown", runtime.getattr("shutdown")?),
-    )?;
-    reactor.setattr(TOKIO_RUNTIME_ATTR, runtime)?;
-
-    Ok(())
-}
-
-/// Get a reference to a Tokio runtime handle stored on the reactor instance.
-fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
-    // This will raise if `TOKIO_RUNTIME_ATTR` is not set or if it is
-    // not a `Runtime`. Careful that this could happen if the user sets it
-    // manually, or if multiple versions of `pyo3-twisted` are used!
-    let runtime: Bound<PyTokioRuntime> = reactor.getattr(TOKIO_RUNTIME_ATTR)?.extract()?;
-    Ok(runtime.borrow())
-}
-
-/// A reference to the `twisted.internet.defer` module.
-static DEFER: OnceCell<PyObject> = OnceCell::new();
-
-/// Access to the `twisted.internet.defer` module.
-fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
-    Ok(DEFER
-        .get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
-        .bind(py))
-}
-
-/// Called when registering modules with python.
-pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
-    let child_module: Bound<'_, PyModule> = PyModule::new(py, "http_client")?;
-    child_module.add_class::<HttpClient>()?;
-
-    // Make sure we fail early if we can't load some modules
-    defer(py)?;
-
-    m.add_submodule(&child_module)?;
-
-    // We need to manually add the module to sys.modules to make `from
-    // synapse.synapse_rust import http_client` work.
-    py.import("sys")?
-        .getattr("modules")?
-        .set_item("synapse.synapse_rust.http_client", child_module)?;
-
-    Ok(())
-}
-
-#[pyclass]
-struct HttpClient {
-    client: reqwest::Client,
-    reactor: PyObject,
-}
-
-#[pymethods]
-impl HttpClient {
-    #[new]
-    pub fn py_new(reactor: Bound<PyAny>, user_agent: &str) -> PyResult<HttpClient> {
-        // Make sure the runtime gets installed
-        let _ = runtime(&reactor)?;
-
-        Ok(HttpClient {
-            client: reqwest::Client::builder()
-                .user_agent(user_agent)
-                .build()
-                .context("building reqwest client")?,
-            reactor: reactor.unbind(),
-        })
-    }
-
-    pub fn get<'a>(
-        &self,
-        py: Python<'a>,
-        url: String,
-        response_limit: usize,
-    ) -> PyResult<Bound<'a, PyAny>> {
-        self.send_request(py, self.client.get(url), response_limit)
-    }
-
-    pub fn post<'a>(
-        &self,
-        py: Python<'a>,
-        url: String,
-        response_limit: usize,
-        headers: HashMap<String, String>,
-        request_body: String,
-    ) -> PyResult<Bound<'a, PyAny>> {
-        let mut builder = self.client.post(url);
-        for (name, value) in headers {
-            builder = builder.header(name, value);
-        }
-        builder = builder.body(request_body);
-
-        self.send_request(py, builder, response_limit)
-    }
-}
-
-impl HttpClient {
-    fn send_request<'a>(
-        &self,
-        py: Python<'a>,
-        builder: RequestBuilder,
-        response_limit: usize,
-    ) -> PyResult<Bound<'a, PyAny>> {
-        create_deferred(py, self.reactor.bind(py), async move {
-            let response = builder.send().await.context("sending request")?;
-
-            let status = response.status();
-
-            let mut stream = response.bytes_stream();
-            let mut buffer = Vec::new();
-            while let Some(chunk) = stream.try_next().await.context("reading body")? {
-                if buffer.len() + chunk.len() > response_limit {
-                    Err(anyhow::anyhow!("Response size too large"))?;
-                }
-
-                buffer.extend_from_slice(&chunk);
-            }
-
-            if !status.is_success() {
-                return Err(HttpResponseException::new(status, buffer));
-            }
-
-            let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
-
-            Ok(r)
-        })
-    }
-}
-
-/// Creates a twisted deferred from the given future, spawning the task on the
-/// tokio runtime.
-///
-/// Does not handle deferred cancellation or contextvars.
-fn create_deferred<'py, F, O>(
-    py: Python<'py>,
-    reactor: &Bound<'py, PyAny>,
-    fut: F,
-) -> PyResult<Bound<'py, PyAny>>
-where
-    F: Future<Output = PyResult<O>> + Send + 'static,
-    for<'a> O: IntoPyObject<'a> + Send + 'static,
-{
-    let deferred = defer(py)?.call_method0("Deferred")?;
-    let deferred_callback = deferred.getattr("callback")?.unbind();
-    let deferred_errback = deferred.getattr("errback")?.unbind();
-
-    let rt = runtime(reactor)?;
-    let handle = rt.handle()?;
-    let task = handle.spawn(fut);
-
-    // Unbind the reactor so that we can pass it to the task
-    let reactor = reactor.clone().unbind();
-    handle.spawn(async move {
-        let res = task.await;
-
-        Python::with_gil(move |py| {
-            // Flatten the panic into standard python error
-            let res = match res {
-                Ok(r) => r,
-                Err(join_err) => match join_err.try_into_panic() {
-                    Ok(panic_err) => Err(RustPanicError::from_panic(&panic_err)),
-                    Err(err) => Err(PyException::new_err(format!("Task cancelled: {err}"))),
-                },
-            };
-
-            // Re-bind the reactor
-            let reactor = reactor.bind(py);
-
-            // Send the result to the deferred, via `.callback(..)` or `.errback(..)`
-            match res {
-                Ok(obj) => {
-                    reactor
-                        .call_method("callFromThread", (deferred_callback, obj), None)
-                        .expect("callFromThread should not fail"); // There's nothing we can really do with errors here
-                }
-                Err(err) => {
-                    reactor
-                        .call_method("callFromThread", (deferred_errback, err), None)
-                        .expect("callFromThread should not fail"); // There's nothing we can really do with errors here
-                }
-            }
-        });
-    });
-
-    Ok(deferred)
-}
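From the Python side, the removed client would be driven roughly like this (a hypothetical usage sketch inferred from the constructor and method signatures above, not an API documented elsewhere):

```python
from twisted.internet import defer, reactor

from synapse.synapse_rust.http_client import HttpClient


@defer.inlineCallbacks
def fetch():
    # The constructor installs a Tokio runtime on the reactor as a side effect.
    client = HttpClient(reactor, "Synapse (test harness)")
    # `get` returns a Deferred firing with the raw response body, or
    # errbacking with HttpResponseException on a non-2xx status.
    body = yield client.get("https://example.com/", 1024 * 1024)
    print(len(body))
```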
@@ -27,7 +27,7 @@ pub enum IdentifierError {

 impl fmt::Display for IdentifierError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{self:?}")
+        write!(f, "{:?}", self)
     }
 }
@@ -8,12 +8,10 @@ pub mod acl;
 pub mod errors;
 pub mod events;
 pub mod http;
-pub mod http_client;
 pub mod identifier;
 pub mod matrix_const;
 pub mod push;
 pub mod rendezvous;
-pub mod segmenter;

 lazy_static! {
     static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();
@@ -52,9 +50,7 @@ fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
     acl::register_module(py, m)?;
     push::register_module(py, m)?;
     events::register_module(py, m)?;
-    http_client::register_module(py, m)?;
     rendezvous::register_module(py, m)?;
-    segmenter::register_module(py, m)?;

     Ok(())
 }
@@ -289,29 +289,6 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
     default_enabled: true,
 }];

-pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
-    PushRule {
-        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.unsubscribed_thread"),
-        priority_class: 6,
-        conditions: Cow::Borrowed(&[Condition::Known(
-            KnownCondition::Msc4306ThreadSubscription { subscribed: false },
-        )]),
-        actions: Cow::Borrowed(&[]),
-        default: true,
-        default_enabled: true,
-    },
-    PushRule {
-        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.subscribed_thread"),
-        priority_class: 6,
-        conditions: Cow::Borrowed(&[Condition::Known(
-            KnownCondition::Msc4306ThreadSubscription { subscribed: true },
-        )]),
-        actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
-        default: true,
-        default_enabled: true,
-    },
-];
-
 pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
     PushRule {
         rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
@@ -729,7 +706,6 @@ lazy_static! {
         .iter()
         .chain(BASE_APPEND_OVERRIDE_RULES.iter())
         .chain(BASE_APPEND_CONTENT_RULES.iter())
-        .chain(BASE_APPEND_POSTCONTENT_RULES.iter())
         .chain(BASE_APPEND_UNDERRIDE_RULES.iter())
         .map(|rule| { (&*rule.rule_id, rule) })
         .collect();
@@ -106,11 +106,8 @@ pub struct PushRuleEvaluator {
|
|||||||
/// flag as MSC1767 (extensible events core).
|
/// flag as MSC1767 (extensible events core).
|
||||||
msc3931_enabled: bool,
|
msc3931_enabled: bool,
|
||||||
|
|
||||||
/// If MSC4210 (remove legacy mentions) is enabled.
|
// If MSC4210 (remove legacy mentions) is enabled.
|
||||||
msc4210_enabled: bool,
|
msc4210_enabled: bool,
|
||||||
|
|
||||||
/// If MSC4306 (thread subscriptions) is enabled.
|
|
||||||
msc4306_enabled: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pymethods]
|
#[pymethods]
|
||||||
@@ -129,7 +126,6 @@ impl PushRuleEvaluator {
|
|||||||
room_version_feature_flags,
|
room_version_feature_flags,
|
||||||
msc3931_enabled,
|
msc3931_enabled,
|
||||||
msc4210_enabled,
|
msc4210_enabled,
|
||||||
msc4306_enabled,
|
|
||||||
))]
|
))]
|
||||||
pub fn py_new(
|
pub fn py_new(
|
||||||
flattened_keys: BTreeMap<String, JsonValue>,
|
flattened_keys: BTreeMap<String, JsonValue>,
|
||||||
@@ -142,7 +138,6 @@ impl PushRuleEvaluator {
|
|||||||
room_version_feature_flags: Vec<String>,
|
room_version_feature_flags: Vec<String>,
|
||||||
msc3931_enabled: bool,
|
msc3931_enabled: bool,
|
||||||
msc4210_enabled: bool,
|
msc4210_enabled: bool,
|
||||||
msc4306_enabled: bool,
|
|
||||||
) -> Result<Self, Error> {
|
) -> Result<Self, Error> {
|
||||||
let body = match flattened_keys.get("content.body") {
|
let body = match flattened_keys.get("content.body") {
|
||||||
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
|
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
|
||||||
@@ -161,7 +156,6 @@ impl PushRuleEvaluator {
|
|||||||
room_version_feature_flags,
|
room_version_feature_flags,
|
||||||
msc3931_enabled,
|
msc3931_enabled,
|
||||||
msc4210_enabled,
|
msc4210_enabled,
|
||||||
msc4306_enabled,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -173,19 +167,12 @@ impl PushRuleEvaluator {
     ///
     /// Returns the set of actions, if any, that match (filtering out any
     /// `dont_notify` and `coalesce` actions).
-    ///
-    /// msc4306_thread_subscription_state: (Only populated if MSC4306 is enabled)
-    ///     The thread subscription state corresponding to the thread containing this event.
-    ///     - `None` if the event is not in a thread, or if MSC4306 is disabled.
-    ///     - `Some(true)` if the event is in a thread and the user has a subscription for that thread
-    ///     - `Some(false)` if the event is in a thread and the user does NOT have a subscription for that thread
-    #[pyo3(signature = (push_rules, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
+    #[pyo3(signature = (push_rules, user_id=None, display_name=None))]
     pub fn run(
         &self,
         push_rules: &FilteredPushRules,
         user_id: Option<&str>,
         display_name: Option<&str>,
-        msc4306_thread_subscription_state: Option<bool>,
     ) -> Vec<Action> {
         'outer: for (push_rule, enabled) in push_rules.iter() {
             if !enabled {
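The doc comment removed above describes how a caller feeds the per-event thread-subscription state into evaluation. A minimal sketch of a call against the v1.139.0 signature (illustrative only; `evaluator` and `push_rules` are assumed to be constructed as in the tests further down, and the identifiers are hypothetical, not from this diff):

    // Hypothetical caller; assumes an existing `evaluator: PushRuleEvaluator`
    // and `push_rules: FilteredPushRules`.
    let actions = evaluator.run(
        &push_rules,
        Some("@alice:example.org"), // user_id
        Some("Alice"),              // display_name
        Some(true),                 // event is in a thread the user subscribes to
    );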
@@ -217,12 +204,7 @@ impl PushRuleEvaluator {
                     Condition::Known(KnownCondition::RoomVersionSupports { feature: _ }),
                 );

-                match self.match_condition(
-                    condition,
-                    user_id,
-                    display_name,
-                    msc4306_thread_subscription_state,
-                ) {
+                match self.match_condition(condition, user_id, display_name) {
                     Ok(true) => {}
                     Ok(false) => continue 'outer,
                     Err(err) => {
@@ -255,20 +237,14 @@ impl PushRuleEvaluator {
     }

     /// Check if the given condition matches.
-    #[pyo3(signature = (condition, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
+    #[pyo3(signature = (condition, user_id=None, display_name=None))]
     fn matches(
         &self,
         condition: Condition,
         user_id: Option<&str>,
         display_name: Option<&str>,
-        msc4306_thread_subscription_state: Option<bool>,
     ) -> bool {
-        match self.match_condition(
-            &condition,
-            user_id,
-            display_name,
-            msc4306_thread_subscription_state,
-        ) {
+        match self.match_condition(&condition, user_id, display_name) {
             Ok(true) => true,
             Ok(false) => false,
             Err(err) => {
@@ -286,7 +262,6 @@ impl PushRuleEvaluator {
         condition: &Condition,
         user_id: Option<&str>,
         display_name: Option<&str>,
-        msc4306_thread_subscription_state: Option<bool>,
     ) -> Result<bool, Error> {
         let known_condition = match condition {
             Condition::Known(known) => known,
@@ -418,13 +393,6 @@ impl PushRuleEvaluator {
                         && self.room_version_feature_flags.contains(&flag)
                 }
             }
-            KnownCondition::Msc4306ThreadSubscription { subscribed } => {
-                if !self.msc4306_enabled {
-                    false
-                } else {
-                    msc4306_thread_subscription_state == Some(*subscribed)
-                }
-            }
         };

         Ok(result)
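The match arm removed above only fires when the experiment is enabled and the caller-supplied state equals the rule's `subscribed` field; an event outside any thread (`None`) never matches either rule. The same logic as a standalone sketch (illustrative only, not code from this diff):

    // `subscribed` comes from the push rule condition; the Option is the
    // per-event thread-subscription state passed into `run`.
    fn msc4306_matches(
        msc4306_enabled: bool,
        subscribed: bool,
        thread_subscription_state: Option<bool>,
    ) -> bool {
        msc4306_enabled && thread_subscription_state == Some(subscribed)
    }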
@@ -568,11 +536,10 @@ fn push_rule_evaluator() {
         vec![],
         true,
         false,
-        false,
     )
     .unwrap();

-    let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"), None);
+    let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
     assert_eq!(result.len(), 3);
 }

@@ -599,7 +566,6 @@ fn test_requires_room_version_supports_condition() {
         flags,
         true,
         false,
-        false,
     )
     .unwrap();

@@ -609,7 +575,6 @@ fn test_requires_room_version_supports_condition() {
         &FilteredPushRules::default(),
         Some("@bob:example.org"),
         None,
-        None,
     );
     assert_eq!(result.len(), 3);

@@ -628,17 +593,7 @@ fn test_requires_room_version_supports_condition() {
     };
     let rules = PushRules::new(vec![custom_rule]);
     result = evaluator.run(
-        &FilteredPushRules::py_new(
-            rules,
-            BTreeMap::new(),
-            true,
-            false,
-            true,
-            false,
-            false,
-            false,
-        ),
-        None,
+        &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
         None,
         None,
     );
@@ -369,10 +369,6 @@ pub enum KnownCondition {
     RoomVersionSupports {
         feature: Cow<'static, str>,
     },
-    #[serde(rename = "io.element.msc4306.thread_subscription")]
-    Msc4306ThreadSubscription {
-        subscribed: bool,
-    },
 }

 impl<'source> IntoPyObject<'source> for Condition {
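The removed `#[serde(rename = ...)]` attribute is what gives the variant its experimental wire-format `kind`. Assuming the enum's usual internal tagging on `kind` (an assumption about context not shown in this diff), the condition would round-trip through JSON along these lines:

    // Sketch only: hypothetical deserialization of the removed condition.
    let cond: KnownCondition = serde_json::from_str(
        r#"{"kind": "io.element.msc4306.thread_subscription", "subscribed": true}"#,
    )?;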
@@ -527,7 +523,6 @@ impl PushRules {
             .chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter())
             .chain(self.content.iter())
             .chain(base_rules::BASE_APPEND_CONTENT_RULES.iter())
-            .chain(base_rules::BASE_APPEND_POSTCONTENT_RULES.iter())
             .chain(self.room.iter())
             .chain(self.sender.iter())
             .chain(self.underride.iter())
@@ -552,13 +547,11 @@ pub struct FilteredPushRules {
     msc3664_enabled: bool,
     msc4028_push_encrypted_events: bool,
     msc4210_enabled: bool,
-    msc4306_enabled: bool,
 }

 #[pymethods]
 impl FilteredPushRules {
     #[new]
-    #[allow(clippy::too_many_arguments)]
     pub fn py_new(
         push_rules: PushRules,
         enabled_map: BTreeMap<String, bool>,
@@ -567,7 +560,6 @@ impl FilteredPushRules {
         msc3664_enabled: bool,
         msc4028_push_encrypted_events: bool,
         msc4210_enabled: bool,
-        msc4306_enabled: bool,
     ) -> Self {
         Self {
             push_rules,
@@ -577,7 +569,6 @@ impl FilteredPushRules {
             msc3664_enabled,
             msc4028_push_encrypted_events,
             msc4210_enabled,
-            msc4306_enabled,
         }
     }

@@ -628,10 +619,6 @@ impl FilteredPushRules {
                     return false;
                 }

-                if !self.msc4306_enabled && rule.rule_id.contains("/.io.element.msc4306.rule.") {
-                    return false;
-                }
-
                 true
             })
             .map(|r| {
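The guard removed above hides the MSC4306 default rules from clients by matching on the rule-id namespace rather than a per-rule flag. For example (illustrative only):

    // The default postcontent rule ids contain the namespaced segment,
    // so the filter drops them while the experiment is off.
    let rule_id = "global/postcontent/.io.element.msc4306.rule.subscribed_thread";
    assert!(rule_id.contains("/.io.element.msc4306.rule."));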
@@ -47,7 +47,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
     headers.typed_insert(AccessControlAllowOrigin::ANY);
     headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
     headers.typed_insert(Pragma::no_cache());
-    headers.typed_insert(CacheControl::new().with_no_store().with_no_transform());
+    headers.typed_insert(CacheControl::new().with_no_store());
     headers.typed_insert(session.etag());
     headers.typed_insert(session.expires());
     headers.typed_insert(session.last_modified());
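The two sides of this hunk serialize to different Cache-Control values on the wire. A rough sketch with the `headers` crate (the exact rendered flag ordering is an assumption):

    use headers::{CacheControl, HeaderMapExt};
    use http::HeaderMap;

    let mut map = HeaderMap::new();
    map.typed_insert(CacheControl::new().with_no_store().with_no_transform());
    // Renders as roughly "Cache-Control: no-store, no-transform".
    // Dropping `.with_no_transform()` leaves just "no-store", which permits
    // intermediaries to transform (e.g. recompress) the response body.
    map.typed_insert(CacheControl::new().with_no_store());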
@@ -192,12 +192,10 @@ impl RendezvousHandler {
             "url": uri,
         })
         .to_string();
-        let length = response.len() as _;

         let mut response = Response::new(response.as_bytes());
         *response.status_mut() = StatusCode::CREATED;
         response.headers_mut().typed_insert(ContentType::json());
-        response.headers_mut().typed_insert(ContentLength(length));
         prepare_headers(response.headers_mut(), &session);
         http_response_to_twisted(twisted_request, response)?;

@@ -301,7 +299,6 @@ impl RendezvousHandler {
         // proxy/cache setup which strips the ETag header if there is no Content-Type set.
         // Specifically, we noticed this behaviour when placing Synapse behind Cloudflare.
         response.headers_mut().typed_insert(ContentType::text());
-        response.headers_mut().typed_insert(ContentLength(0));

         http_response_to_twisted(twisted_request, response)?;

@@ -319,7 +316,6 @@ impl RendezvousHandler {
         response
             .headers_mut()
             .typed_insert(AccessControlAllowOrigin::ANY);
-        response.headers_mut().typed_insert(ContentLength(0));
         http_response_to_twisted(twisted_request, response)?;

         Ok(())
@@ -1,33 +0,0 @@
-use icu_segmenter::options::WordBreakInvariantOptions;
-use icu_segmenter::WordSegmenter;
-use pyo3::prelude::*;
-
-#[pyfunction]
-pub fn parse_words(text: &str) -> PyResult<Vec<String>> {
-    let segmenter = WordSegmenter::new_auto(WordBreakInvariantOptions::default());
-    let mut parts = Vec::new();
-    let mut last = 0usize;
-
-    // `segment_str` gives us word boundaries as a vector of indexes. Use that
-    // to build a vector of words, and return.
-    for boundary in segmenter.segment_str(text) {
-        if boundary > last {
-            parts.push(text[last..boundary].to_string());
-        }
-        last = boundary;
-    }
-    Ok(parts)
-}
-
-pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
-    let child_module = PyModule::new(py, "segmenter")?;
-    child_module.add_function(wrap_pyfunction!(parse_words, m)?)?;
-
-    m.add_submodule(&child_module)?;
-
-    py.import("sys")?
-        .getattr("modules")?
-        .set_item("synapse.synapse_rust.segmenter", child_module)?;
-
-    Ok(())
-}
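The deleted module exposed ICU word segmentation to Python as `synapse.synapse_rust.segmenter.parse_words`. Note that `segment_str` yields boundaries around every segment, so whitespace and punctuation come back as their own entries. A usage sketch (the exact output shape is an assumption about ICU's segmentation of this input):

    let words = parse_words("Hello, world!").unwrap();
    // Expected along the lines of:
    // ["Hello", ",", " ", "world", "!"]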
File diff suppressed because it is too large
@@ -1,2 +0,0 @@
-If you want to update the meta schema, copy this folder and increase its version
-number instead.
@@ -1,29 +0,0 @@
-{
-    "$schema": "https://json-schema.org/draft/2020-12/schema",
-    "$id": "https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json",
-    "$vocabulary": {
-        "https://json-schema.org/draft/2020-12/vocab/core": true,
-        "https://json-schema.org/draft/2020-12/vocab/applicator": true,
-        "https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
-        "https://json-schema.org/draft/2020-12/vocab/validation": true,
-        "https://json-schema.org/draft/2020-12/vocab/meta-data": true,
-        "https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
-        "https://json-schema.org/draft/2020-12/vocab/content": true,
-        "https://element-hq.github.io/synapse/latest/schema/v1/vocab/documentation": false
-    },
-    "$ref": "https://json-schema.org/draft/2020-12/schema",
-    "properties": {
-        "io.element.type_name": {
-            "type": "string",
-            "description": "Human-readable type of a schema that is displayed instead of the standard JSON Schema types like `object` or `integer`. In case the JSON Schema type contains `null`, this information should be presented alongside the human-readable type name.",
-            "examples": ["duration", "byte size"]
-        },
-        "io.element.post_description": {
-            "type": "string",
-            "description": "Additional description of a schema, better suited to be placed less prominently in the generated documentation, e.g., at the end of a section after listings of items and properties.",
-            "examples": [
-                "### Advanced uses\n\nThe spent coffee grounds can be added to compost for improving soil and growing plants."
-            ]
-        }
-    }
-}
@@ -1,11 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta http-equiv="refresh" content="0; URL=../meta.schema.json">
-    <meta charset="UTF-8">
-    <title>Redirecting to ../meta.schema.json…</title>
-</head>
-<body>
-    <p>Redirecting to <a href="../meta.schema.json">../meta.schema.json</a>…</p>
-</body>
-</html>
@@ -32,7 +32,7 @@ DISTS = (
     "debian:sid",  # (rolling distro, no EOL)
     "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
     "ubuntu:noble",  # 24.04 LTS (EOL 2029-06)
-    "ubuntu:plucky",  # 25.04 (EOL 2026-01)
+    "ubuntu:oracular",  # 24.10 (EOL 2025-07)
    "debian:trixie",  # (EOL not specified yet)
 )

@@ -243,7 +243,7 @@ def do_lint() -> Set[str]:
                 importlib.import_module(module_info.name)
             except ModelCheckerException as e:
                 logger.warning(
-                    "Bad annotation found when importing %s", module_info.name
+                    f"Bad annotation found when importing {module_info.name}"
                 )
                 failures.add(format_model_checker_exception(e))

Some files were not shown because too many files have changed in this diff.