Mirror of https://github.com/element-hq/synapse.git
Synced 2025-12-05 01:10:13 +00:00

Compare commits: 197 commits (madlittlem... → anoa/fix_g)
Commits (SHA1):

5fa9d0043b 53b50ec781 2f65b9e001 418c9f3fe5
eac862629f 67f22a200d da6c0cae96 b8f6ad2736
ecc90593cb a4f9274107 ec7554b768 d2c582ef3c
2d07bd7fd2 a7303c5311 690b3a4fcc d399d7649a
9d9275da5a ef80338c2d be75de2cfc 07cfb69778
c0d6998dea 8390138fa4 627be7e0a7 47fb4b43ca
715cc5ee37 d440cfc9e2 18f07fdc4c e3344dc0c3
bcbbccca23 8f01eb8ee0 21d125e29a 638fa0f33d
38afd10823 87cfe56d14 631eed91f1 7b8831310f
fb12d516cd dde4e0e83d 8696551e7f 28bc486bff
ca27938257 036fb87584 abe974cd2b 5e3839e2af
0ae1f105b2 2443760d0d 4f7ffc13a7 340bdd896a
957456ed3a 459ebe07fc 527e831b61 76b012c3f5
7069636c2d dde1e012a4 533d5e0a7a 26aaaf9e48
4a37c4d87a d67280f5d8 42bbff8294 5465c68553
1d2ddbc76e 70c044db8e 6835e7be0d d27ff161f5
06a84f4fe0 1c093509ce 0615b64bb4 c284d8cb24
5fff5a1893 765817a1ad 396de6544a d1c96ee0f2
5adb08f3c9 2aab171042 0aeb95fb07 72020f3f2c
ad8dcc2119 84e1d15232 2b7a398b14 81848e8193
be3ecb332a 14c114b9fd 2eb6239ad8 26583f8623
265e5fe384 5143f93dc9 2f2b854ac1 8f61bdb470
7c32988f6b 688f635b59 04721c85e6 d2a966f922
dee6ba57a6 e2ec3b7d0d acb9ec3c38 6ff181dbc7
fd8fa97b6a 5266e423e2 0458f691b6 25fa555395
7708801d56 d3fc638c29 6c292dc4ee 120389b077
71b34b3a07 e766f325af 512b3f50cf 0fbf296c99
0c8594c9a8 35c9cbb09d 9680804496 8f63e2246a
aa83d660d5 641ced06a2 354f1cc219 478f593b6c
cd6c424adb b70f668a8c 9c4ba13a10 0447496549
9ed0d36fe2 5857d2de59 b10f3f5959 fd29e3219c
d308469e90 daf33e4954 ddc7627b22 5be7679dd9
e7d98d3429 d05f44a1c6 8d5d87fb0a 9a88d25f8e
5a9ca1e3d9 83aca3f097 d80f515622 4367fb2d07
b596faa4ec 6f9fab1089 84d64251dc 2bed3fb566
2c60b67a95 6358afff8d f7b547e2d8 8f7bd946de
4f80fa4b0a b2592667a4 769d30a247 7ecfe8b1a8
e1036ffa48 8c98cf7e55 ec64c3e88d ada3a3b2b3
9cc4001778 c68c5dd07b 92bdf77c3f e43bf10187
6146dbad3e ca655e4020 7951d41b4e e235099ab9
3e865e403b 35e7e659f6 39e4f27347 6fe8137a4a
fcffd2e897 d48e69ad4c 74fdbc7b75 4d55f2f301
dfccde9f60 4b43e6fe02 b2997a8f20 bff4a11b3f
09a489e198 537e14169e 68068de3a4 356cc4a0a1
27fc3389f3 df2cfb3932 c339021ce8 499f947c67
e76a9af4d7 eec1ca6e93 56b5759c0f 767177ca5a
5b8e6e7911 6a6be6fbe2 21c7841228 5b55e3f15d
0e2b92bcbc 481987eb83 5fd30c7ea7 d527c794fb
19fe3f001e f8a44638eb 7ec5e60671 48184eefa3
205d9e4fc4
29  .ci/scripts/triage_labelled_issue.sh  (new executable file)

@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# 1) Resolve project ID.
+PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
+
+# 2) Find existing item (project card) for this issue.
+ITEM_ID=$(
+  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
+    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
+)
+
+# 3) If one doesn't exist, add this issue to the project.
+if [ -z "${ITEM_ID:-}" ]; then
+  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
+fi
+
+# 4) Get Status field id + the option id for TARGET_STATUS.
+FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
+STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
+STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
+OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
+
+if [ -z "${OPTION_ID:-}" ]; then
+  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
+fi
+
+# 5) Set Status (moves item to the matching column in the board view).
+gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
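The script takes all of its inputs from environment variables; the triage_labelled.yml workflow further down exports them before invoking it. A minimal sketch of running it by hand, assuming `gh` is authenticated via `GITHUB_TOKEN` and reusing the same values the workflow sets (the issue URL here is only an example):

# Hypothetical manual run; the workflow exports these same variables.
export GITHUB_TOKEN="<token with repo + project scopes>"
export PROJECT_OWNER=matrix-org
export PROJECT_NUMBER=67
export TARGET_STATUS="Needs info"
export ISSUE_URL="https://github.com/element-hq/synapse/issues/18913"  # example issue
bash .ci/scripts/triage_labelled_issue.sh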
12  .github/workflows/docker.yml  (vendored)

@@ -31,7 +31,7 @@ jobs:
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Checkout repository
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Extract version from pyproject.toml
        # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,13 +41,13 @@ jobs:
          echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV

      - name: Log in to DockerHub
-       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
-       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -102,14 +102,14 @@ jobs:
          merge-multiple: true

      - name: Log in to DockerHub
-       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'docker.io') }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
-       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
        with:
          registry: ghcr.io
@@ -120,7 +120,7 @@ jobs:
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Install Cosign
-       uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
+       uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0

      - name: Calculate docker image tag
        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
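The version-extraction step above is a plain grep-plus-sed pipeline, so it can be checked outside CI; a sketch of what it yields, assuming the checked-out tree is at 1.140.0 (example output, not taken from the diff):

$ grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/'
1.140.0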
6  .github/workflows/docs-pr.yaml  (vendored)

@@ -13,7 +13,7 @@ jobs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
          mdbook-version: '0.4.17'

      - name: Setup python
-       uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"

@@ -50,7 +50,7 @@ jobs:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
4  .github/workflows/docs.yaml  (vendored)

@@ -50,7 +50,7 @@ jobs:
    needs:
      - pre
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
        run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js

      - name: Setup python
-       uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
6  .github/workflows/fix_lint.yaml  (vendored)

@@ -18,14 +18,14 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
          components: clippy, rustfmt
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
26  .github/workflows/latest_deps.yml  (vendored)

@@ -42,12 +42,12 @@ jobs:
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
@@ -77,13 +77,13 @@ jobs:
        postgres-version: "14"

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +93,7 @@ jobs:
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
@@ -152,13 +152,13 @@ jobs:
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -202,14 +202,14 @@ jobs:

    steps:
      - name: Check out synapse codebase
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-     - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+     - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -234,7 +234,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
4  .github/workflows/poetry_lockfile.yaml  (vendored)

@@ -16,8 +16,8 @@ jobs:
    name: "Check locked dependencies have sdists"
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: '3.x'
      - run: pip install tomli
8  .github/workflows/push_complement_image.yml  (vendored)

@@ -33,22 +33,22 @@ jobs:
      packages: write
    steps:
      - name: Checkout specific branch (debug build)
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        if: github.event_name == 'workflow_dispatch'
        with:
          ref: ${{ inputs.branch }}
      - name: Checkout clean copy of develop (scheduled build)
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        if: github.event_name == 'schedule'
        with:
          ref: develop
      - name: Checkout clean copy of master (on-push)
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        if: github.event_name == 'push'
        with:
          ref: master
      - name: Login to registry
-       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
22  .github/workflows/release-artifacts.yml  (vendored)

@@ -27,8 +27,8 @@ jobs:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - id: set-distros
@@ -55,7 +55,7 @@ jobs:

    steps:
      - name: Checkout
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          path: src

@@ -66,7 +66,7 @@ jobs:
          install: true

      - name: Set up docker layer caching
-       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,7 +74,7 @@ jobs:
            ${{ runner.os }}-buildx-

      - name: Set up python
-       uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"

@@ -114,8 +114,8 @@ jobs:
        os:
          - ubuntu-24.04
          - ubuntu-24.04-arm
-         - macos-13 # This uses x86-64
          - macos-14 # This uses arm64
+         - macos-15-intel # This uses x86-64
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
        is_pr:
@@ -124,7 +124,7 @@ jobs:
      exclude:
        # Don't build macos wheels on PR CI.
        - is_pr: true
-         os: "macos-13"
+         os: "macos-15-intel"
        - is_pr: true
          os: "macos-14"
        # Don't build aarch64 wheels on PR CI.
@@ -132,9 +132,9 @@ jobs:
          os: "ubuntu-24.04-arm"

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
@@ -165,8 +165,8 @@ jobs:
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.10"
8  .github/workflows/schema.yaml  (vendored)

@@ -14,8 +14,8 @@ jobs:
    name: Ensure Synapse config schema is valid
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - name: Install check-jsonschema
@@ -40,8 +40,8 @@ jobs:
    name: Ensure generated documentation is up-to-date
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - name: Install PyYAML
114  .github/workflows/tests.yml  (vendored)

@@ -86,12 +86,12 @@ jobs:
    if: ${{ needs.changes.outputs.linting == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
@@ -106,8 +106,8 @@ jobs:
    if: ${{ needs.changes.outputs.linting == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +116,8 @@ jobs:
  check-lockfile:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - run: .ci/scripts/check_lockfile.py
@@ -129,7 +129,7 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,13 +151,13 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -174,7 +174,7 @@ jobs:
      # Cribbed from
      # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
      - name: Restore/persist mypy's cache
-       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: |
            .mypy_cache
@@ -187,7 +187,7 @@ jobs:
  lint-crlf:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

@@ -195,11 +195,11 @@ jobs:
    if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -213,14 +213,14 @@ jobs:
    if: ${{ needs.changes.outputs.linting == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          poetry-version: "2.1.1"
@@ -233,14 +233,14 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          components: clippy
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: cargo clippy -- -D warnings

@@ -252,14 +252,14 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: nightly-2025-04-23
          components: clippy
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: cargo clippy --all-features -- -D warnings

@@ -270,13 +270,13 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -306,16 +306,16 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          # We use nightly so that we can use some unstable options that we use in
          # `.rustfmt.toml`.
          toolchain: nightly-2025-04-23
          components: rustfmt
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: cargo fmt --check

@@ -326,8 +326,8 @@ jobs:
    needs: changes
    if: ${{ needs.changes.outputs.linting_readme == 'true' }}
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - run: "pip install rstcheck"
@@ -376,8 +376,8 @@ jobs:
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.x"
      - id: get-matrix
@@ -397,7 +397,7 @@ jobs:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
@@ -412,10 +412,10 @@ jobs:
            postgres:${{ matrix.job.postgres-version }}

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
@@ -453,13 +453,13 @@ jobs:
      - changes
    runs-on: ubuntu-22.04
    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      # There aren't wheels for some of the older deps, so we need to install
      # their build dependencies
@@ -468,7 +468,7 @@ jobs:
          sudo apt-get -qq install build-essential libffi-dev python3-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

-     - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+     - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: '3.9'

@@ -518,7 +518,7 @@ jobs:
        extras: ["all"]

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -568,15 +568,15 @@ jobs:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Run SyTest
        run: /bootstrap.sh synapse
@@ -615,7 +615,7 @@ jobs:
          --health-retries 5

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
@@ -659,7 +659,7 @@ jobs:
          --health-retries 5

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -714,20 +714,20 @@ jobs:

    steps:
      - name: Checkout synapse codebase
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          path: synapse

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-     - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+     - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -750,13 +750,13 @@ jobs:
      - changes

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: cargo test

@@ -770,13 +770,13 @@ jobs:
      - changes

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: nightly-2022-12-01
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - run: cargo bench --no-run
53  .github/workflows/triage_labelled.yml  (vendored)

@@ -6,43 +6,26 @@ on:

jobs:
  move_needs_info:
    name: Move X-Needs-Info on the triage board
    runs-on: ubuntu-latest
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
+   permissions:
+     contents: read
+   env:
+     # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
+     GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+     PROJECT_OWNER: matrix-org
+     # Backend issue triage board.
+     # https://github.com/orgs/matrix-org/projects/67/views/1
+     PROJECT_NUMBER: 67
+     ISSUE_URL: ${{ github.event.issue.html_url }}
+     # This field is case-sensitive.
+     TARGET_STATUS: Needs info
    steps:
-     - uses: actions/add-to-project@c0c5949b017d0d4a39f7ba888255881bdac2a823 # v1.0.2
-       id: add_project
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
-         project-url: "https://github.com/orgs/matrix-org/projects/67"
-         github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-       # This action will error if the issue already exists on the project. Which is
-       # common as `X-Needs-Info` will often be added to issues that are already in
-       # the triage queue. Prevent the whole job from failing in this case.
-       continue-on-error: true
-     - name: Set status
-       env:
-         GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-       run: |
-         gh api graphql -f query='
-           mutation(
-             $project: ID!
-             $item: ID!
-             $fieldid: ID!
-             $columnid: String!
-           ) {
-             updateProjectV2ItemFieldValue(
-               input: {
-                 projectId: $project
-                 itemId: $item
-                 fieldId: $fieldid
-                 value: {
-                   singleSelectOptionId: $columnid
-                 }
-               }
-             ) {
-               projectV2Item {
-                 id
-               }
-             }
-           }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
+         # Only clone the script file we care about, instead of the whole repo.
+         sparse-checkout: .ci/scripts/triage_labelled_issue.sh

+     - name: Ensure issue exists on the board, then set Status
+       run: .ci/scripts/triage_labelled_issue.sh
24  .github/workflows/twisted_trunk.yml  (vendored)

@@ -43,13 +43,13 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
@@ -70,14 +70,14 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
@@ -117,13 +117,13 @@ jobs:
      - ${{ github.workspace }}:/src

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+       uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -175,14 +175,14 @@ jobs:

    steps:
      - name: Run actions/checkout@v4 for synapse
-       uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-     - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+     - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -217,7 +217,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-     - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+     - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
309  CHANGES.md

@@ -1,3 +1,312 @@

# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider)
module must upgrade to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0).
Using older versions of the module with this release of Synapse will prevent
users from being able to upload or download media.
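A minimal sketch of performing that module upgrade in the Python environment that runs Synapse, assuming the module is installed from PyPI under the name `synapse-s3-storage-provider`:

# Hypothetical upgrade command; adjust for your virtualenv or container setup.
pip install --upgrade 'synapse-s3-storage-provider==1.6.0'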
|
||||
|
||||
|
||||
No significant changes since 1.140.0rc1.
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.140.0rc1 (2025-10-10)
|
||||
|
||||
## Features
|
||||
|
||||
- Add [a new Media Query by ID Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/media_admin_api.html#query-a-piece-of-media-by-id) that allows server admins to query and investigate the metadata of local or cached remote media via
|
||||
the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris). ([\#18911](https://github.com/element-hq/synapse/issues/18911))
|
||||
- Add [a new Fetch Event Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/fetch_event.html) to fetch an event by ID. ([\#18963](https://github.com/element-hq/synapse/issues/18963))
|
||||
- Update [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available. ([\#18934](https://github.com/element-hq/synapse/issues/18934))
|
||||
- Add experimental implementation of the `GET /_matrix/client/v1/rtc/transports` endpoint for the latest draft of [MSC4143: MatrixRTC](https://github.com/matrix-org/matrix-spec-proposals/pull/4143). ([\#18967](https://github.com/element-hq/synapse/issues/18967))
|
||||
- Expose a `defer_to_threadpool` function in the Synapse Module API that allows modules to run a function on a separate thread in a custom threadpool. ([\#19032](https://github.com/element-hq/synapse/issues/19032))
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix room upgrade `room_config` argument and documentation for `user_may_create_room` spam-checker callback. ([\#18721](https://github.com/element-hq/synapse/issues/18721))
|
||||
- Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`. ([\#18948](https://github.com/element-hq/synapse/issues/18948))
|
||||
- Fix bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini. ([\#19002](https://github.com/element-hq/synapse/issues/19002))
|
||||
- Update Synapse main process version string to include git info. ([\#19011](https://github.com/element-hq/synapse/issues/19011))
|
||||
|
||||
## Improved Documentation
|
||||
|
||||
- Explain how `Deferred` callbacks interact with logcontexts. ([\#18914](https://github.com/element-hq/synapse/issues/18914))
|
||||
- Fix documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported. ([\#18998](https://github.com/element-hq/synapse/issues/18998))
|
||||
|
||||
## Deprecations and Removals
|
||||
|
||||
- Remove deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods which already have equivalent bare methods in `synapse.logging.context`. ([\#18989](https://github.com/element-hq/synapse/issues/18989))
|
||||
- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. ([\#18996](https://github.com/element-hq/synapse/issues/18996))
|
||||
|
||||
## Internal Changes
|
||||
|
||||
- Cleanly shutdown `SynapseHomeServer` object, allowing artifacts of embedded small hosts to be properly garbage collected. ([\#18828](https://github.com/element-hq/synapse/issues/18828))
|
||||
- Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc. ([\#18767](https://github.com/element-hq/synapse/issues/18767))
|
||||
- Fix `server_name` in logging context for multiple Synapse instances in one process. ([\#18868](https://github.com/element-hq/synapse/issues/18868))
|
||||
- Wrap the Rust HTTP client with `make_deferred_yieldable` so it follows Synapse logcontext rules. ([\#18903](https://github.com/element-hq/synapse/issues/18903))
|
||||
- Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board. ([\#18913](https://github.com/element-hq/synapse/issues/18913))
|
||||
- Disconnect background process work from request trace. ([\#18932](https://github.com/element-hq/synapse/issues/18932))
|
||||
- Reduce overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load. ([\#18939](https://github.com/element-hq/synapse/issues/18939))
|
||||
- Update error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper. ([\#18947](https://github.com/element-hq/synapse/issues/18947))
|
||||
- Fix `no active span when trying to log` tracing error on startup (when OpenTracing is enabled). ([\#18959](https://github.com/element-hq/synapse/issues/18959))
|
||||
- Fix `run_coroutine_in_background(...)` incorrectly handling logcontext. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
|
||||
- Add debug logs wherever we change current logcontext. ([\#18966](https://github.com/element-hq/synapse/issues/18966))
|
||||
- Update dockerfile metadata to fix broken link; point to documentation website. ([\#18971](https://github.com/element-hq/synapse/issues/18971))
|
||||
- Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs. ([\#18973](https://github.com/element-hq/synapse/issues/18973))
|
||||
- Fix logcontext handling in `timeout_deferred` tests. ([\#18974](https://github.com/element-hq/synapse/issues/18974))
|
||||
- Remove internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process. ([\#18988](https://github.com/element-hq/synapse/issues/18988))
|
||||
- Switch task scheduler from raw logcontext manipulation to using the dedicated logcontext utils. ([\#18990](https://github.com/element-hq/synapse/issues/18990))
|
||||
- Remove `MockClock()` in tests. ([\#18992](https://github.com/element-hq/synapse/issues/18992))
|
||||
- Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager` which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled. ([\#19007](https://github.com/element-hq/synapse/issues/19007))
|
||||
- Remove `version_string` argument from `HomeServer` since it's always the same. ([\#19012](https://github.com/element-hq/synapse/issues/19012))
|
||||
- Remove duplicate call to `hs.start_background_tasks()` introduced from a bad merge. ([\#19013](https://github.com/element-hq/synapse/issues/19013))
|
||||
- Split homeserver creation (`create_homeserver`) and setup (`setup`). ([\#19015](https://github.com/element-hq/synapse/issues/19015))
|
||||
- Swap near-end-of-life `macos-13` GitHub Actions runner for the `macos-15-intel` variant. ([\#19025](https://github.com/element-hq/synapse/issues/19025))
|
||||
- Introduce `RootConfig.validate_config()` which can be subclassed in `HomeServerConfig` to do cross-config class validation. ([\#19027](https://github.com/element-hq/synapse/issues/19027))
|
||||
- Allow any command of the `release.py` script to accept a `--gh-token` argument. ([\#19035](https://github.com/element-hq/synapse/issues/19035))
|
||||
|
||||
|
||||
|
||||
### Updates to locked dependencies
|
||||
|
||||
* Bump Swatinem/rust-cache from 2.8.0 to 2.8.1. ([\#18949](https://github.com/element-hq/synapse/issues/18949))
|
||||
* Bump actions/cache from 4.2.4 to 4.3.0. ([\#18983](https://github.com/element-hq/synapse/issues/18983))
|
||||
* Bump anyhow from 1.0.99 to 1.0.100. ([\#18950](https://github.com/element-hq/synapse/issues/18950))
|
||||
* Bump authlib from 1.6.3 to 1.6.4. ([\#18957](https://github.com/element-hq/synapse/issues/18957))
|
||||
* Bump authlib from 1.6.4 to 1.6.5. ([\#19019](https://github.com/element-hq/synapse/issues/19019))
|
||||
* Bump bcrypt from 4.3.0 to 5.0.0. ([\#18984](https://github.com/element-hq/synapse/issues/18984))
|
||||
* Bump docker/login-action from 3.5.0 to 3.6.0. ([\#18978](https://github.com/element-hq/synapse/issues/18978))
|
||||
* Bump lxml from 6.0.0 to 6.0.2. ([\#18979](https://github.com/element-hq/synapse/issues/18979))
|
||||
* Bump phonenumbers from 9.0.13 to 9.0.14. ([\#18954](https://github.com/element-hq/synapse/issues/18954))
|
||||
* Bump phonenumbers from 9.0.14 to 9.0.15. ([\#18991](https://github.com/element-hq/synapse/issues/18991))
|
||||
* Bump prometheus-client from 0.22.1 to 0.23.1. ([\#19016](https://github.com/element-hq/synapse/issues/19016))
|
||||
* Bump pydantic from 2.11.9 to 2.11.10. ([\#19017](https://github.com/element-hq/synapse/issues/19017))
|
||||
* Bump pygithub from 2.7.0 to 2.8.1. ([\#18952](https://github.com/element-hq/synapse/issues/18952))
|
||||
* Bump regex from 1.11.2 to 1.11.3. ([\#18981](https://github.com/element-hq/synapse/issues/18981))
|
||||
* Bump serde from 1.0.224 to 1.0.226. ([\#18953](https://github.com/element-hq/synapse/issues/18953))
|
||||
* Bump serde from 1.0.226 to 1.0.228. ([\#18982](https://github.com/element-hq/synapse/issues/18982))
|
||||
* Bump setuptools-rust from 1.11.1 to 1.12.0. ([\#18980](https://github.com/element-hq/synapse/issues/18980))
|
||||
* Bump twine from 6.1.0 to 6.2.0. ([\#18985](https://github.com/element-hq/synapse/issues/18985))
|
||||
* Bump types-pyyaml from 6.0.12.20250809 to 6.0.12.20250915. ([\#19018](https://github.com/element-hq/synapse/issues/19018))
|
||||
* Bump types-requests from 2.32.4.20250809 to 2.32.4.20250913. ([\#18951](https://github.com/element-hq/synapse/issues/18951))
|
||||
* Bump typing-extensions from 4.14.1 to 4.15.0. ([\#18956](https://github.com/element-hq/synapse/issues/18956))
|
||||

# Synapse 1.139.2 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))


# Synapse 1.139.1 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))


# Synapse 1.138.4 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.138.3 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))


# Synapse 1.138.3 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))


# Synapse 1.139.0 (2025-09-30)

### `/register` requests from old application service implementations may break when using MAS

If you are using Matrix Authentication Service (MAS), as of this release any
Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see [the
upgrade
notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas)
for more information.

No significant changes since 1.139.0rc3.

# Synapse 1.139.0rc3 (2025-09-25)

## Bugfixes

- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964))


# Synapse 1.139.0rc2 (2025-09-23)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.139.0rc1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))


# Synapse 1.139.0rc1 (2025-09-23)

## Features

- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. ([\#18695](https://github.com/element-hq/synapse/issues/18695))
- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow a newer draft. ([\#18846](https://github.com/element-hq/synapse/issues/18846))
- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848))
- Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. ([\#18898](https://github.com/element-hq/synapse/issues/18898))
- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899))
- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946))

## Bugfixes

- Ensure all PDUs sent via `/send` pass canonical JSON checks. ([\#18641](https://github.com/element-hq/synapse/issues/18641))
- Fix a bug where we did not send invite revocations over federation. ([\#18823](https://github.com/element-hq/synapse/issues/18823))
- Fix prefixed support for [MSC4133](https://github.com/matrix-org/matrix-spec-proposals/pull/4133). ([\#18875](https://github.com/element-hq/synapse/issues/18875))
- Fix an open redirect in the legacy SSO flow with the `idp` query parameter. ([\#18909](https://github.com/element-hq/synapse/issues/18909))
- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))

## Updates to the Docker image

- Suppress the bulk of the "Applying schema" log noise when `SYNAPSE_LOG_TESTING` is set. ([\#18878](https://github.com/element-hq/synapse/issues/18878))

## Improved Documentation

- Clarify Python dependency constraints in our deprecation policy. ([\#18856](https://github.com/element-hq/synapse/issues/18856))
- Clarify the necessary `jwt_config` parameter in the OIDC documentation for authentik. Contributed by @maxkratz. ([\#18931](https://github.com/element-hq/synapse/issues/18931))

## Deprecations and Removals

- Remove the obsolete and experimental `/sync/e2ee` endpoint. ([\#18583](https://github.com/element-hq/synapse/issues/18583))

## Internal Changes

- Fix `LaterGauge` metrics to collect from all servers. ([\#18791](https://github.com/element-hq/synapse/issues/18791))
- Configure Synapse to run [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) Complement tests. ([\#18819](https://github.com/element-hq/synapse/issues/18819))
- Remove `sentinel` logcontext usage where we log in `setup`, `start` and `exit`. ([\#18870](https://github.com/element-hq/synapse/issues/18870))
- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. ([\#18874](https://github.com/element-hq/synapse/issues/18874))
- Start background tasks after we fork the process (daemonize). ([\#18886](https://github.com/element-hq/synapse/issues/18886))
- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. ([\#18900](https://github.com/element-hq/synapse/issues/18900), [\#18906](https://github.com/element-hq/synapse/issues/18906))
- Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. ([\#18907](https://github.com/element-hq/synapse/issues/18907))
- Replace usages of the deprecated `pkg_resources` interface in preparation for setuptools dropping it soon. ([\#18910](https://github.com/element-hq/synapse/issues/18910))
- Split loading config from homeserver `setup`. ([\#18933](https://github.com/element-hq/synapse/issues/18933))
- Fix `run_in_background` not being awaited properly in some tests, causing `LoggingContext` problems. ([\#18937](https://github.com/element-hq/synapse/issues/18937))
- Fix `run_as_background_process` not being awaited properly, causing `LoggingContext` problems in the experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938))
- Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944))
- Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945))


### Updates to locked dependencies

* Bump actions/setup-go from 5.5.0 to 6.0.0. ([\#18891](https://github.com/element-hq/synapse/issues/18891))
* Bump actions/setup-python from 5.6.0 to 6.0.0. ([\#18890](https://github.com/element-hq/synapse/issues/18890))
* Bump authlib from 1.6.1 to 1.6.3. ([\#18921](https://github.com/element-hq/synapse/issues/18921))
* Bump jsonschema from 4.25.0 to 4.25.1. ([\#18897](https://github.com/element-hq/synapse/issues/18897))
* Bump log from 0.4.27 to 0.4.28. ([\#18892](https://github.com/element-hq/synapse/issues/18892))
* Bump phonenumbers from 9.0.12 to 9.0.13. ([\#18893](https://github.com/element-hq/synapse/issues/18893))
* Bump pydantic from 2.11.7 to 2.11.9. ([\#18922](https://github.com/element-hq/synapse/issues/18922))
* Bump serde from 1.0.219 to 1.0.223. ([\#18920](https://github.com/element-hq/synapse/issues/18920))
* Bump serde_json from 1.0.143 to 1.0.145. ([\#18919](https://github.com/element-hq/synapse/issues/18919))
* Bump sigstore/cosign-installer from 3.9.2 to 3.10.0. ([\#18917](https://github.com/element-hq/synapse/issues/18917))
* Bump towncrier from 24.8.0 to 25.8.0. ([\#18894](https://github.com/element-hq/synapse/issues/18894))
* Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915. ([\#18918](https://github.com/element-hq/synapse/issues/18918))
* Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895))
* Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924))

# Synapse 1.138.2 (2025-09-24)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.138.1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))


# Synapse 1.138.1 (2025-09-24)

## Bugfixes

- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))


# Synapse 1.138.0 (2025-09-09)

No significant changes since 1.138.0rc1.

# Synapse 1.138.0rc1 (2025-09-02)

### Features

- Support for the stable endpoint and scopes of [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) & co. ([\#18549](https://github.com/element-hq/synapse/issues/18549))

### Bugfixes

- Improve database performance of [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban. ([\#18851](https://github.com/element-hq/synapse/issues/18851))
- Do not throw an error when fetching a rejected delayed state event on startup. ([\#18858](https://github.com/element-hq/synapse/issues/18858))

### Improved Documentation

- Fix the worker documentation incorrectly indicating that all room Admin API requests could be handled by workers. ([\#18853](https://github.com/element-hq/synapse/issues/18853))

### Internal Changes

- Instrument `_ByteProducer` with tracing to measure potential dead time while writing bytes to the request. ([\#18804](https://github.com/element-hq/synapse/issues/18804))
- Switch to OpenTracing's `ContextVarsScopeManager` instead of our own custom `LogContextScopeManager`. ([\#18849](https://github.com/element-hq/synapse/issues/18849))
- Trace how much work is being done while "recursively fetching redactions". ([\#18854](https://github.com/element-hq/synapse/issues/18854))
- Link the [upstream Twisted bug](https://github.com/twisted/twisted/issues/12498) tracking the problem that explains why we have to use a `Producer` to write bytes to the request. ([\#18855](https://github.com/element-hq/synapse/issues/18855))
- Introduce the `EventPersistencePair` type. ([\#18857](https://github.com/element-hq/synapse/issues/18857))


### Updates to locked dependencies

* Bump actions/add-to-project from c0c5949b017d0d4a39f7ba888255881bdac2a823 to 4515659e2b458b27365e167605ac44f219494b66. ([\#18863](https://github.com/element-hq/synapse/issues/18863))
* Bump actions/checkout from 4.3.0 to 5.0.0. ([\#18834](https://github.com/element-hq/synapse/issues/18834))
* Bump anyhow from 1.0.98 to 1.0.99. ([\#18841](https://github.com/element-hq/synapse/issues/18841))
* Bump docker/login-action from 3.4.0 to 3.5.0. ([\#18835](https://github.com/element-hq/synapse/issues/18835))
* Bump dtolnay/rust-toolchain from b3b07ba8b418998c39fb20f53e8b695cdcc8de1b to e97e2d8cc328f1b50210efc529dca0028893a2d9. ([\#18862](https://github.com/element-hq/synapse/issues/18862))
* Bump phonenumbers from 9.0.11 to 9.0.12. ([\#18837](https://github.com/element-hq/synapse/issues/18837))
* Bump regex from 1.11.1 to 1.11.2. ([\#18864](https://github.com/element-hq/synapse/issues/18864))
* Bump reqwest from 0.12.22 to 0.12.23. ([\#18842](https://github.com/element-hq/synapse/issues/18842))
* Bump ruff from 0.12.7 to 0.12.10. ([\#18865](https://github.com/element-hq/synapse/issues/18865))
* Bump serde_json from 1.0.142 to 1.0.143. ([\#18866](https://github.com/element-hq/synapse/issues/18866))
* Bump types-bleach from 6.2.0.20250514 to 6.2.0.20250809. ([\#18838](https://github.com/element-hq/synapse/issues/18838))
* Bump types-jsonschema from 4.25.0.20250720 to 4.25.1.20250822. ([\#18867](https://github.com/element-hq/synapse/issues/18867))
* Bump types-psycopg2 from 2.9.21.20250718 to 2.9.21.20250809. ([\#18836](https://github.com/element-hq/synapse/issues/18836))


# Synapse 1.137.0 (2025-08-26)

No significant changes since 1.137.0rc1.


# Synapse 1.137.0rc1 (2025-08-19)

### Bugfixes

`Cargo.lock` (generated, 43 lines changed):

```diff
@@ -28,9 +28,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.98"
+version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
 
 [[package]]
 name = "arc-swap"
@@ -753,9 +753,9 @@ checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
 
 [[package]]
 name = "log"
-version = "0.4.27"
+version = "0.4.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
+checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
 
 [[package]]
 name = "lru-slab"
@@ -1062,9 +1062,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.11.1"
+version = "1.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
+checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1074,9 +1074,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.9"
+version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1091,9 +1091,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
 [[package]]
 name = "reqwest"
-version = "0.12.22"
+version = "0.12.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
+checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
 dependencies = [
  "base64",
  "bytes",
@@ -1250,18 +1250,28 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.219"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
 dependencies = [
+ "serde_core",
  "serde_derive",
 ]
 
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+dependencies = [
+ "serde_derive",
+]
+
 [[package]]
 name = "serde_derive"
-version = "1.0.219"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1270,14 +1280,15 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.142"
+version = "1.0.145"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7"
+checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
 dependencies = [
  "itoa",
  "memchr",
  "ryu",
  "serde",
+ "serde_core",
 ]
 
 [[package]]
```
```diff
@@ -265,6 +265,8 @@ This software is dual-licensed by New Vector Ltd (Element). It can be used eithe
 Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
 
 Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
 
+.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
+  :alt: (get community support in #synapse:matrix.org)
```
New changelog fragments:

- `changelog.d/19031.feature` (new file): Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper.
- `changelog.d/19033.feature` (new file): Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper.
- `changelog.d/19037.misc` (new file): Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing).
- `changelog.d/19039.misc` (new file): Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`.
- `changelog.d/19040.misc` (new file): Update the TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`).
- `changelog.d/19060.bugfix` (new file): Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd.
- `changelog.d/19064.docker` (new file): Update the Docker image to use Debian trixie as the base and thus Python 3.13.
- `changelog.d/19078.bugfix` (new file): Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long.
- `changelog.d/19079.bugfix` (new file): Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie is the configuration of the `SameSite` attribute, as described in the comments / cookie names. Contributed by @kieranlane.
- `changelog.d/19080.misc` (new file): Update deprecated code in the release script to prevent a warning message from being printed.
`debian/changelog` (vendored, 86 lines changed):

```diff
@@ -1,3 +1,89 @@
+matrix-synapse-py3 (1.140.0) stable; urgency=medium
+
+  * New Synapse release 1.140.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100
+
+matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.140.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100
+
+matrix-synapse-py3 (1.139.2) stable; urgency=medium
+
+  * New Synapse release 1.139.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100
+
+matrix-synapse-py3 (1.139.1) stable; urgency=medium
+
+  * New Synapse release 1.139.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
+
+matrix-synapse-py3 (1.138.4) stable; urgency=medium
+
+  * New Synapse release 1.138.4.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100
+
+matrix-synapse-py3 (1.138.3) stable; urgency=medium
+
+  * New Synapse release 1.138.3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100
+
+matrix-synapse-py3 (1.139.0) stable; urgency=medium
+
+  * New Synapse release 1.139.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
+
+matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
+
+matrix-synapse-py3 (1.138.2) stable; urgency=medium
+
+  * The licensing specifier has been updated to add an optional
+    `LicenseRef-Element-Commercial` license. The code was already licensed in
+    this manner - the debian metadata was just not updated to reflect it.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100
+
+matrix-synapse-py3 (1.138.1) stable; urgency=medium
+
+  * New Synapse release 1.138.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
+
+matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
+
+matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
+
+matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.138.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Sep 2025 12:16:14 +0000
+
 matrix-synapse-py3 (1.137.0) stable; urgency=medium
 
   * New Synapse release 1.137.0.
 
  -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Aug 2025 10:23:41 +0100
 
 matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
 
   * New Synapse release 1.137.0rc1.
```
`debian/copyright` (vendored, 2 lines changed):

```diff
@@ -8,7 +8,7 @@ License: Apache-2.0
 
 Files: *
 Copyright: 2023 New Vector Ltd
-License: AGPL-3.0-or-later
+License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
 
 Files: synapse/config/saml2.py
 Copyright: 2015, Ericsson
```
```diff
@@ -20,8 +20,8 @@
 # `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
 # in `poetry export` in the past.
 
-ARG DEBIAN_VERSION=bookworm
-ARG PYTHON_VERSION=3.12
+ARG DEBIAN_VERSION=trixie
+ARG PYTHON_VERSION=3.13
 ARG POETRY_VERSION=2.1.1
 
 ###
@@ -142,10 +142,10 @@ RUN \
   libwebp7 \
   xmlsec1 \
   libjemalloc2 \
   libicu \
   | grep '^\w' > /tmp/pkg-list && \
   for arch in arm64 amd64; do \
     mkdir -p /tmp/debs-${arch} && \
     chown _apt:root /tmp/debs-${arch} && \
     cd /tmp/debs-${arch} && \
     apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
   done
@@ -171,16 +171,11 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
 
 ARG TARGETARCH
 
-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
+LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
 LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
 
 # On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
 # libraries to the right place, else the `COPY` won't work.
 # On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
 # already present in the runtime image.
 COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
 COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
 COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
 COPY --from=runtime-deps /install-${TARGETARCH}/var /var
```
```diff
@@ -1,9 +1,10 @@
-# syntax=docker/dockerfile:1
+# syntax=docker/dockerfile:1-labs
 
 ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
-ARG PYTHON_VERSION=3.12
+ARG DEBIAN_VERSION=trixie
+ARG PYTHON_VERSION=3.13
+ARG REDIS_VERSION=7.2
 
 # first of all, we create a base image with dependencies which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing
@@ -11,15 +12,27 @@ ARG PYTHON_VERSION=3.12
 
 FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
 
+ARG DEBIAN_VERSION
+ARG REDIS_VERSION
+
 # Tell apt to keep downloaded package files, as we're using cache mounts.
 RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
 
+# The upstream redis-server deb has fewer dynamic libraries than Debian's package which makes it easier to copy later on
+RUN \
+  curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
+  chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
+  echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
+
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && \
   DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-    nginx-light
+    nginx-light \
+    redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
+    # libicu is required by postgres, see `docker/complement/Dockerfile`
+    libicu76
 
 RUN \
   # remove default page
@@ -35,19 +48,12 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
 
 RUN mkdir -p /uv/etc/supervisor/conf.d
 
-# Similarly, a base to copy the redis server from.
-#
-# The redis docker image has fewer dynamic libraries than the debian package,
-# which makes it much easier to copy (but we need to make sure we use an image
-# based on the same debian version as the synapse image, to make sure we get
-# the expected version of libc.
-FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
-
 # now build the final image, based on the the regular Synapse docker image
 FROM $FROM
 
 # Copy over dependencies
-COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
+COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
+COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
 COPY --from=deps_base /uv /
 COPY --from=deps_base /usr/sbin/nginx /usr/sbin
 COPY --from=deps_base /usr/share/nginx /usr/share/nginx
```
```diff
@@ -9,7 +9,7 @@
 ARG SYNAPSE_VERSION=latest
 # This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
+ARG DEBIAN_VERSION=trixie
 
 FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
 
@@ -18,10 +18,10 @@ FROM $FROM
 # since for repeated rebuilds, this is much faster than apt installing
 # postgres each time.
 
-# This trick only works because (a) the Synapse image happens to have all the
-# shared libraries that postgres wants, (b) we use a postgres image based on
-# the same debian version as Synapse's docker image (so the versions of the
-# shared libraries match).
+# This trick only works because we use a postgres image based on the same
+# debian version as Synapse's docker image (so the versions of the shared
+# libraries match). Any missing libraries need to be added to either the
+# Synapse image or docker/Dockerfile-workers.
 RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
 COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
 COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
```
```diff
@@ -133,6 +133,8 @@ experimental_features:
   msc3984_appservice_key_query: true
   # Invite filtering
   msc4155_enabled: true
+  # Thread Subscriptions
+  msc4306_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
```

```diff
@@ -77,6 +77,13 @@ loggers:
     #}
     synapse.visibility.filtered_event_debug:
         level: DEBUG
+
+    {#
+      If Synapse is under test, we don't care about seeing the "Applying schema" log
+      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
+    #}
+    synapse.storage.prepare_database:
+        level: WARN
 {% endif %}
 
 root:
```
```diff
@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian bookworm here because this could change upstream
-# and other Dockerfiles used for testing are expecting bookworm.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
+# We hardcode the use of Debian trixie here because this could change upstream
+# and other Dockerfiles used for testing are expecting trixie.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
 
 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps
```
```diff
@@ -60,6 +60,7 @@
 - [Admin API](usage/administration/admin_api/README.md)
   - [Account Validity](admin_api/account_validity.md)
   - [Background Updates](usage/administration/admin_api/background_updates.md)
+  - [Fetch Event](admin_api/fetch_event.md)
   - [Event Reports](admin_api/event_reports.md)
   - [Experimental Features](admin_api/experimental_features.md)
   - [Media](admin_api/media_admin_api.md)
```
`docs/admin_api/fetch_event.md` (new file, 53 lines):

# Fetch Event API

The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```

The API returns a JSON body like the following:

Response:
```json
{
  "event": {
    "auth_events": [
      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
    ],
    "content": {
      "body": "Hey now",
      "msgtype": "m.text"
    },
    "depth": 6,
    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    "hashes": {
      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
    },
    "origin_server_ts": 799,
    "prev_events": [
      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
    ],
    "room_id": "!aIhKToCqgPTBloWMpf:test",
    "sender": "@user:test",
    "signatures": {
      "test": {
        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
      }
    },
    "type": "m.room.message",
    "unsigned": {
      "age_ts": 799
    }
  }
}
```
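
For example, a server admin could drive this endpoint from a small script. A minimal sketch — the homeserver URL and token are placeholders, and the `requests` dependency is an assumption of this example rather than something the API requires:

```python
import requests

HOMESERVER = "https://synapse.example.com"  # assumption: your homeserver's base URL
ADMIN_TOKEN = "<admin access token>"        # assumption: an access token for a server admin
EVENT_ID = "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s"

# Fetch the event as a server admin, regardless of room membership.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/fetch_event/{EVENT_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
event = resp.json()["event"]
print(event["type"], event["sender"], event["content"])
```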
````diff
@@ -39,6 +39,40 @@ the use of the
 [List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
 Admin API.
 
+## Query a piece of media by ID
+
+This API returns information about a piece of local or cached remote media given the origin server name and media id. If
+information is requested for remote media which is not cached the endpoint will return 404.
+
+Request:
+```http
+GET /_synapse/admin/v1/media/<origin>/<media_id>
+```
+
+The API returns a JSON body with media info like the following:
+
+Response:
+```json
+{
+  "media_info": {
+    "media_origin": "remote.com",
+    "user_id": null,
+    "media_id": "sdginwegWEG",
+    "media_type": "img/png",
+    "media_length": 67,
+    "upload_name": "test.png",
+    "created_ts": 300,
+    "filesystem_id": "wgeweg",
+    "url_cache": null,
+    "last_access_ts": 400,
+    "quarantined_by": null,
+    "authenticated": false,
+    "safe_from_quarantine": null,
+    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
+  }
+}
+```
+
 # Quarantine media
 
 Quarantining media means that it is marked as inaccessible by users. It applies
````
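
The new query endpoint added above can be exercised the same way as the fetch event API; a minimal sketch with a hypothetical homeserver URL and token, using the origin and media ID from the example response:

```python
import requests

HOMESERVER = "https://synapse.example.com"  # assumption: your homeserver's base URL
ADMIN_TOKEN = "<admin access token>"        # assumption: a server admin's access token

# Look up a piece of (possibly remote) media by origin and media ID.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/media/remote.com/sdginwegWEG",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
if resp.status_code == 404:
    # Remote media that has never been cached locally returns 404.
    print("media not known to this homeserver")
else:
    resp.raise_for_status()
    info = resp.json()["media_info"]
    print(info["media_type"], info["media_length"], info["quarantined_by"])
```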
```diff
@@ -1,13 +1,11 @@
-Deprecation Policy for Platform Dependencies
-============================================
+# Deprecation Policy
 
-Synapse has a number of platform dependencies, including Python, Rust,
-PostgreSQL and SQLite. This document outlines the policy towards which versions
-we support, and when we drop support for versions in the future.
+Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
+and **application dependencies** (Python and Rust packages). This document outlines the
+policy towards which versions we support, and when we drop support for versions in the
+future.
 
-Policy
-------
+## Platform Dependencies
 
 Synapse follows the upstream support life cycles for Python and PostgreSQL,
 i.e. when a version reaches End of Life Synapse will withdraw support for that
@@ -26,8 +24,8 @@ The oldest supported version of SQLite is the version
 [provided](https://packages.debian.org/bullseye/libsqlite3-0) by
 [Debian oldstable](https://wiki.debian.org/DebianOldStable).
 
-Context
--------
+### Context
 
 It is important for system admins to have a clear understanding of the platform
 requirements of Synapse and its deprecation policies so that they can
@@ -50,4 +48,42 @@ the ecosystem.
 On a similar note, SQLite does not generally have a concept of "supported
 release"; bugfixes are published for the latest minor release only. We chose to
 track Debian's oldstable as this is relatively conservative, predictably updated
-and is consistent with the `.deb` packages released by Matrix.org.
+and is consistent with the `.deb` packages released by Matrix.org.
+
+
+## Application dependencies
+
+For application-level Python dependencies, we often specify loose version constraints
+(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
+are only added when necessary to prevent known incompatibilities.
+
+When selecting a minimum version, while we are mindful of the impact on downstream
+package maintainers, our primary focus is on the maintainability and progress of Synapse
+itself.
+
+For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
+available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
+[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
+extra scrutiny or consideration at this point.
+
+We aggressively update Rust dependencies. Since these are statically linked and managed
+entirely by `cargo` during build, they *can* pose no ongoing maintenance burden on others.
+This allows us to freely upgrade to leverage the latest ecosystem advancements, assuming
+they don't have their own system-level dependencies.
+
+
+### Context
+
+Because Python dependencies can easily be managed in a virtual environment, we are less
+concerned about the criteria for selecting minimum versions. The only thing of concern
+is making sure we're not making it unnecessarily difficult for downstream package
+maintainers. Generally, this just means avoiding the bleeding edge for a few months.
+
+The situation for Rust dependencies is fundamentally different. For packagers, the
+concerns around Python dependency versions do not apply. The `cargo` tool handles
+downloading and building all libraries to satisfy dependencies, and these libraries are
+statically linked into the final binary. This means that from a packager's perspective,
+the Rust dependency versions are an internal build detail, not a runtime dependency to
+be managed on the target system. Consequently, we have even greater flexibility to
+upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
+package Rust libraries, but this appears to be the outlier rather than the norm.
```
````diff
@@ -59,6 +59,28 @@ def do_request_handling():
     logger.debug("phew")
 ```
 
+### The `sentinel` context
+
+The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty
+sentinel value to represent the root logcontext. This is what is used when there is no
+other logcontext set. The phrase "clear/reset the logcontext" means to set the current
+logcontext to the `sentinel` logcontext.
+
+No CPU/database usage metrics are recorded against the `sentinel` logcontext.
+
+Ideally, nothing from the Synapse homeserver would be logged against the `sentinel`
+logcontext, as we want to know which server the logs came from. In practice, this is not
+always the case yet, especially outside of request handling.
+
+Global things outside of Synapse (e.g. Twisted reactor code) should run in the
+`sentinel` logcontext. It's only when it calls into application code that a logcontext
+gets activated. This means the reactor should be started in the `sentinel` logcontext,
+and any time an awaitable yields control back to the reactor, it should reset the
+logcontext to be the `sentinel` logcontext. This is important to avoid leaking the
+current logcontext to the reactor (which would then get picked up and associated with
+the next thing the reactor does).
+
+
 ## Using logcontexts with awaitables
 
 Awaitables break the linear flow of code so that there is no longer a single entry point
@@ -121,8 +143,7 @@ cares about.
 The following sections describe pitfalls and helpful patterns when
 implementing these rules.
 
-Always await your awaitables
-----------------------------
+## Always await your awaitables
 
 Whenever you get an awaitable back from a function, you should `await` on
 it as soon as possible. Do not pass go; do not do any logging; do not
@@ -181,6 +202,171 @@ async def sleep(seconds):
     return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
 ```
 
+## Deferred callbacks
+
+When a deferred callback is called, it inherits the current logcontext. The deferred
+callback chain can resume a coroutine, which, if following our logcontext rules, will
+restore its own logcontext, then run:
+
+- until it yields control back to the reactor, setting the sentinel logcontext
+- or until it finishes, restoring the logcontext it was started with (calling context)
+
+This behavior creates two specific issues:
+
+**Issue 1:** The first issue is that the callback may have reset the logcontext to the
+sentinel before returning. This means our calling function will continue with the
+sentinel logcontext instead of the logcontext it was started with (bad).
+
+**Issue 2:** The second issue is that the current logcontext that called the deferred
+callback could finish before the callback finishes (bad).
+
+In the following example, the deferred callback is called with the "main" logcontext and
+runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
+Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
+sentinel before yielding control back to the reactor. Our `main` function continues with
+the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
+`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
+control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
+our `with LoggingContext("competing")` block exits, and restores the previous "main"
+logcontext which has already finished, resulting in `WARNING: Re-starting finished log
+context main` and leaking the `main` logcontext into the reactor, which will then
+erroneously be associated with the next task the reactor picks up.
+
+```python
+async def competing_callback():
+    # Since this is run with the "main" logcontext, when the "competing"
+    # logcontext exits, it will restore the previous "main" logcontext which has
+    # already finished and results in "WARNING: Re-starting finished log context main"
+    # and leaking the `main` logcontext into the reactor.
+    with LoggingContext("competing"):
+        await clock.sleep(0)
+
+def main():
+    with LoggingContext("main"):
+        d = defer.Deferred()
+        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
+        # Call the callback within the "main" logcontext.
+        d.callback(None)
+        # Bad: This will be logged against the sentinel logcontext
+        logger.debug("ugh")
+
+main()
+```
+
+**Solution 1:** We could of course fix this by following the general rule of "always
+await your awaitables":
+
+```python
+async def main():
+    with LoggingContext("main"):
+        d = defer.Deferred()
+        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
+        d.callback(None)
+        # Wait for `d` to finish before continuing so the "main" logcontext is
+        # still active. This works because `d` already follows our logcontext
+        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
+        await d
+        # Good: This will be logged against the "main" logcontext
+        logger.debug("phew")
+```
+
+**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
+`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
+the callback, and restore the "main" logcontext afterwards before continuing the `main`
+function. This solves the problem because when the "competing" logcontext exits, it will
+restore the sentinel logcontext, which is never finished by its nature, so there is no
+warning and no leakage into the reactor.
+
+```python
+async def main():
+    with LoggingContext("main"):
+        d = defer.Deferred()
+        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
+        with PreserveLoggingContext():
+            # Call the callback with the sentinel logcontext.
+            d.callback(None)
+        # Good: This will be logged against the "main" logcontext
+        logger.debug("phew")
+```
+
+**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
+callback in the current context without running into issues:
+
+We can solve the first issue by using `run_in_background(...)` to run the callback in
+the current logcontext; it handles the magic behind the scenes of a) restoring the
+calling logcontext before returning to the caller and b) resetting the logcontext to the
+sentinel after the deferred completes and we yield control back to the reactor, to avoid
+leaking the logcontext into the reactor.
+
+To solve the second issue, we can extend the lifetime of the "main" logcontext by
+avoiding the `LoggingContext`'s context manager lifetime methods
+(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
+`PreserveLoggingContext` and passing in the "main" logcontext.
+
+```python
+async def main():
+    main_context = LoggingContext("main")
+    with PreserveLoggingContext(main_context):
+        d = defer.Deferred()
+        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
+        # The whole lambda will be run in the "main" logcontext. But we're using
+        # a trick to return the deferred `d` itself so that `run_in_background`
+        # will wait on that to complete and reset the logcontext to the sentinel
+        # when it does, to avoid leaking the "main" logcontext into the reactor.
+        run_in_background(lambda: (d.callback(None), d)[1])
+        # Good: This will be logged against the "main" logcontext
+        logger.debug("phew")
+
+    ...
+
+    # Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
+    # point. This allows us to catch bugs if we later try to erroneously restart a finished
+    # logcontext.
+    #
+    # Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
+    # created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
+    # previous logcontext. Our goal is to have the calling context remain unchanged after
+    # finishing the "main" logcontext.
+    with PreserveLoggingContext():
+        # Finish the "main" logcontext
+        with main_context:
+            # Empty block - We're just trying to call `__exit__` on the "main" context
+            # manager to finish it. We can't call `__exit__` directly as the code expects us
+            # to `__enter__` before calling `__exit__` to `start`/`stop` things
+            # appropriately. And in any case, it's probably best not to call the internal
+            # methods directly.
+            pass
+```
+
+The same thing applies if you have some deferreds stored somewhere which you want to
+callback in the current logcontext.
+
+
+### Deferred errbacks and cancellations
+
+The same care should be taken when calling errbacks on deferreds. An errback and
+callback act the same in this regard (see the section above).
+
+```python
+d = defer.Deferred()
+d.addErrback(some_other_function)
+d.errback(failure)
+```
+
+Additionally, cancellation is the same as directly calling the errback with a
+`twisted.internet.defer.CancelledError`:
+
+```python
+d = defer.Deferred()
+d.addErrback(some_other_function)
+d.cancel()
+```
+
 ## Fire-and-forget
 
 Sometimes you want to fire off a chain of execution, but not wait for
@@ -362,3 +548,19 @@ chain are dropped. Dropping the reference to an awaitable you're
 supposed to be awaiting is bad practice, so this doesn't
 actually happen too much. Unfortunately, when it does happen, it will
 lead to leaked logcontexts which are incredibly hard to track down.
+
+## Debugging logcontext issues
+
+Debugging logcontext issues can be tricky, as leaking or losing a logcontext will surface
+downstream and can point to an unrelated part of the codebase. It's best to enable debug
+logging for `synapse.logging.context.debug` (needs to be explicitly configured) and go
+backwards in the logs from the point where the issue is observed to find the root cause.
+
+`log.config.yaml`
+```yaml
+loggers:
+  # Unlike other loggers, this one needs to be explicitly configured to see debug logs.
+  synapse.logging.context.debug:
+    level: DEBUG
+```
````
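
Alongside that logging config, it can also help to log which logcontext is active at suspect points while bisecting a leak. A minimal sketch, assuming the `current_context()` helper from `synapse.logging.context`:

```python
import logging

from synapse.logging.context import current_context

logger = logging.getLogger(__name__)


def log_where_we_are(label: str) -> None:
    # `current_context()` returns the active logcontext (or the sentinel),
    # which makes it easy to spot a lost or leaked context in the logs.
    logger.debug("%s: current logcontext is %s", label, current_context())
```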
````diff
@@ -64,3 +64,68 @@ If multiple modules implement this callback, they will be considered in order. I
 returns `True`, Synapse falls through to the next one. The value of the first callback that
 returns `False` will be used. If this happens, Synapse will not call any of the subsequent
 implementations of this callback.
+
+### `get_media_upload_limits_for_user`
+
+_First introduced in Synapse v1.139.0_
+
+```python
+async def get_media_upload_limits_for_user(user_id: str, size: int) -> Optional[List[synapse.module_api.MediaUploadLimit]]
+```
+
+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
+Called when processing a request to store content in the media repository. This can be used to dynamically override
+the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).
+
+The arguments passed to this callback are:
+
+* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
+
+If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).
+
+If an empty list is returned then no limits are applied (**warning:** users will be able
+to upload as much data as they desire).
+
+If multiple modules implement this callback, they will be considered in order. If a
+callback returns `None`, Synapse falls through to the next one. The value of the first
+callback that does not return `None` will be used. If this happens, Synapse will not call
+any of the subsequent implementations of this callback.
+
+If there are no registered modules, or if all modules return `None`, then
+the default
+[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
+will be used.
+
+### `on_media_upload_limit_exceeded`
+
+_First introduced in Synapse v1.139.0_
+
+```python
+async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
+```
+
+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
+Called when a user attempts to upload media that would exceed a
+[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).
+
+This callback will only be called on workers which handle
+[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
+requests.
+
+This could be used to inform the user, through some external method, that they have
+reached a media upload limit.
+
+The arguments passed to this callback are:
+
+* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
+* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
+* `sent_bytes`: The number of bytes already sent during the period of the limit.
+* `attempted_bytes`: The number of bytes that the user attempted to send.
````
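
As an illustration, a module might combine the two new callbacks roughly as follows. This is a minimal sketch against the experimental signatures above: the `register_media_repository_callbacks` registration hook and the exact shape of `MediaUploadLimit` are assumptions drawn from the surrounding docs and may differ in your Synapse version, and the exemption policy is purely illustrative:

```python
import logging
from typing import List, Optional

import synapse.module_api
from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class MediaLimitsModule:
    def __init__(self, config: dict, api: ModuleApi):
        # Assumption: media repository callbacks are registered through this hook.
        api.register_media_repository_callbacks(
            get_media_upload_limits_for_user=self.get_media_upload_limits_for_user,
            on_media_upload_limit_exceeded=self.on_media_upload_limit_exceeded,
        )

    async def get_media_upload_limits_for_user(
        self, user_id: str, size: int
    ) -> Optional[List["synapse.module_api.MediaUploadLimit"]]:
        # Exempt a trusted bot from all limits: an empty list means "no limits".
        if user_id == "@trusted-bot:example.com":
            return []
        # Returning None falls through to other modules / the configured limits.
        return None

    async def on_media_upload_limit_exceeded(
        self, user_id, limit, sent_bytes: int, attempted_bytes: int
    ) -> None:
        # Tell an external system (here: just the log) that the limit was hit.
        logger.info(
            "%s exceeded a media upload limit: %d bytes sent, %d attempted",
            user_id, sent_bytes, attempted_bytes,
        )
```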
````diff
@@ -195,12 +195,15 @@ _Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that o
 async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```
 
-Called when processing a room creation request.
+Called when processing a room creation or room upgrade request.
 
 The arguments passed to this callback are:
 
 * `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
+  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
+  subset of the state of the previous room).
 
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
````
|
||||
|
||||
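To make the return contract concrete, here is a minimal spam-checker sketch. Registration via `register_spam_checker_callbacks` is the established spam-checker hook; the policy itself (blocking public rooms for a hypothetical guest domain) is purely illustrative.

```python
from typing import Union

from synapse.module_api import NOT_SPAM, ModuleApi, errors


class RoomCreationPolicy:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room,
        )

    async def user_may_create_room(
        self, user_id: str, room_config: dict
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]:
        # Illustrative policy: users on a hypothetical guest domain may not
        # create (or upgrade to) publicly listed rooms.
        if (
            room_config.get("visibility") == "public"
            and user_id.endswith(":guests.example.com")
        ):
            return errors.Codes.FORBIDDEN
        return NOT_SPAM
```
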
@@ -186,6 +186,7 @@ oidc_providers:
4. Note the slug of your application, Client ID and Client Secret.

Note: RSA keys must be used for signing for Authentik, ECC keys do not work.
Note: The provider must have a signing key set and must not use an encryption key.

Synapse config:
```yaml
@@ -204,6 +205,12 @@ oidc_providers:
  config:
    localpart_template: "{{ user.preferred_username }}"
    display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
[...]
jwt_config:
  enabled: true
  secret: "your client secret" # TO BE FILLED (same as `client_secret` above)
  algorithm: "RS256"
  # (...other fields)
```

### Dex

@@ -35,7 +35,7 @@ handlers:
loggers:
  synapse:
    level: INFO
    handlers: [remote]
    handlers: [file]
  synapse.storage.SQL:
    level: WARNING
```

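For the new `[file]` reference to resolve, the `handlers:` section (elided above this hunk) needs a matching handler. A minimal sketch along the lines of Synapse's sample log config, with an illustrative log path:

```yaml
handlers:
  file:
    class: logging.handlers.TimedRotatingFileHandler
    formatter: precise
    filename: /var/log/matrix-synapse/homeserver.log  # illustrative path
    when: midnight
    backupCount: 3
    encoding: utf8
```
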
@@ -117,6 +117,51 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.141.0

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you
are using the Docker images as a base image you may need to e.g. adjust the
paths you mount any additional Python packages at.

# Upgrading to v1.140.0

## Users of `synapse-s3-storage-provider` must update the module to `v1.6.0`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider/)
module must update it to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0),
otherwise users will be unable to upload or download media.

# Upgrading to v1.139.0

## `/register` requests from old application service implementations may break when using MAS

Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. This is a
result of [MSC4190: Device management for application
services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190), which
adds new endpoints for application services to create encryption-ready devices
without using `/login`, or `/register` without `inhibit_login=true`.

If an application service you use starts to fail with the mentioned error,
ensure it is up to date. If it is, then kindly let the author know that they
need to update their implementation to call `/register` with
`inhibit_login=true`.

# Upgrading to v1.138.2

## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin

Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.

This notice also applies to the v1.139.0 release.

# Upgrading to v1.136.0

## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`

@@ -2006,9 +2006,8 @@ This setting has the following sub-options:
Default configuration:
```yaml
rc_reports:
  per_user:
    per_second: 1.0
    burst_count: 5.0
  per_second: 1.0
  burst_count: 5.0
```

Example configuration:
@@ -2031,9 +2030,8 @@ This setting has the following sub-options:
Default configuration:
```yaml
rc_room_creation:
  per_user:
    per_second: 0.016
    burst_count: 10.0
  per_second: 0.016
  burst_count: 10.0
```

Example configuration:
@@ -2168,9 +2166,12 @@ max_upload_size: 60M
### `media_upload_limits`

*(array)* A list of media upload limits defining how much data a given user can upload in a given time period.
These limits are applied in addition to the `max_upload_size` limit above (which applies to individual uploads).

An empty list means no limits are applied.

These settings can be overridden using the `get_media_upload_limits_for_user` module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).

Defaults to `[]`.

Example configuration:
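The example itself is truncated by this diff. As a rough illustration of the shape only, with hypothetical field names (check the rendered configuration documentation for the real keys):

```yaml
media_upload_limits:
  # Hypothetical keys: cap each user at 1G of uploads per day.
  - time_period: 1d
    max_size: 1G
```
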
@@ -2572,6 +2573,28 @@ Example configuration:
turn_allow_guests: false
```
---
### `matrix_rtc`

*(object)* Options related to MatrixRTC. Defaults to `{}`.

This setting has the following sub-options:

* `transports` (array): A list of transport types and arguments to use for MatrixRTC connections. Defaults to `[]`.

  Options for each entry include:

  * `type` (string): The type of transport to use to connect to the selective forwarding unit (SFU).

  * `livekit_service_url` (string): The base URL of the LiveKit service. Should only be used with LiveKit-based transports.

Example configuration:
```yaml
matrix_rtc:
  transports:
    - type: livekit
      livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
```
---
## Registration

Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting) section of this manual.

@@ -252,7 +252,7 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
^/_matrix/client/(r0|v3|unstable)/capabilities$
^/_matrix/client/(r0|v3|unstable)/notifications$
^/_synapse/admin/v1/rooms/
^/_synapse/admin/v1/rooms/[^/]+$

# Encryption requests
^/_matrix/client/(r0|v3|unstable)/keys/query$

poetry.lock (generated, 564 lines changed)
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.

[[package]]
name = "annotated-types"
@@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a

[[package]]
name = "authlib"
version = "1.6.1"
version = "1.6.5"
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
files = [
    {file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"},
    {file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"},
    {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"},
    {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"},
]

[package.dependencies]
@@ -68,63 +68,75 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"]

[[package]]
name = "bcrypt"
version = "4.3.0"
version = "5.0.0"
description = "Modern password hashing for your software and your servers"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"},
    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"},
    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"},
    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"},
    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"},
    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"},
    {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"},
    {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"},
    {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"},
    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"},
    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"},
    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"},
    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"},
    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"},
    {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"},
    {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"},
    {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"},
    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"},
    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"},
    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"},
    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"},
    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"},
    {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"},
    {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"},
    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"},
    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"},
    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"},
    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"},
    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"},
    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"},
    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"},
    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"},
    {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"},
    {file = "bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83"},
    {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746"},
    {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e"},
    {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d"},
    {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba"},
    {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41"},
    {file = "bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861"},
    {file = "bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e"},
    {file = "bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5"},
    {file = "bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493"},
    {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b"},
    {file = "bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c"},
    {file = "bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4"},
    {file = "bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e"},
    {file = "bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d"},
    {file = "bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993"},
    {file = "bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75"},
    {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff"},
    {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4"},
    {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb"},
    {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c"},
    {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb"},
    {file = "bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538"},
    {file = "bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9"},
    {file = "bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980"},
    {file = "bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8"},
    {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a"},
    {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1"},
    {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42"},
    {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10"},
    {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172"},
    {file = "bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683"},
    {file = "bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2"},
    {file = "bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927"},
    {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534"},
    {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4"},
    {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911"},
    {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4"},
    {file = "bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd"},
]

[package.extras]
@@ -435,7 +447,7 @@ description = "XML bomb protection for Python stdlib modules"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
    {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
    {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -460,7 +472,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
    {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
    {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
@@ -511,7 +523,7 @@ description = "Python wrapper for hiredis"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
    {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:add17efcbae46c5a6a13b244ff0b4a8fa079602ceb62290095c941b42e9d5dec"},
    {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5fe955cc4f66c57df1ae8e5caf4de2925d43b5efab4e40859662311d1bcc5f54"},
@@ -848,7 +860,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
    {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -919,14 +931,14 @@ i18n = ["Babel (>=2.7)"]

[[package]]
name = "jsonschema"
version = "4.25.0"
version = "4.25.1"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716"},
    {file = "jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f"},
    {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"},
    {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"},
]

[package.dependencies]
@@ -986,7 +998,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
    {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
    {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
@@ -997,107 +1009,153 @@ pyasn1 = ">=0.4.6"
|
||||
|
||||
[[package]]
|
||||
name = "lxml"
|
||||
version = "6.0.0"
|
||||
version = "6.0.2"
|
||||
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"url-preview\""
|
||||
markers = "extra == \"url-preview\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"},
|
||||
{file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"},
|
||||
{file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"},
|
||||
{file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"},
|
||||
{file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"},
|
||||
{file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"},
|
||||
{file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0"},
|
||||
{file = "lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078"},
|
||||
{file = "lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f"},
|
||||
{file = "lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6"},
|
||||
{file = "lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77"},
|
||||
{file = "lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f"},
{file = "lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452"},
{file = "lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6"},
{file = "lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a"},
{file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679"},
{file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659"},
{file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484"},
{file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2"},
{file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314"},
{file = "lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2"},
{file = "lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7"},
{file = "lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf"},
{file = "lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe"},
{file = "lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d"},
{file = "lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d"},
{file = "lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37"},
{file = "lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9"},
{file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917"},
{file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f"},
{file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8"},
{file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a"},
{file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c"},
{file = "lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b"},
{file = "lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed"},
{file = "lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8"},
{file = "lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d"},
{file = "lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d"},
{file = "lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9"},
{file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e"},
{file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d"},
{file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec"},
{file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272"},
{file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f"},
{file = "lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312"},
{file = "lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca"},
{file = "lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c"},
{file = "lxml-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a656ca105115f6b766bba324f23a67914d9c728dafec57638e2b92a9dcd76c62"},
{file = "lxml-6.0.2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c54d83a2188a10ebdba573f16bd97135d06c9ef60c3dc495315c7a28c80a263f"},
{file = "lxml-6.0.2-cp38-cp38-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:1ea99340b3c729beea786f78c38f60f4795622f36e305d9c9be402201efdc3b7"},
{file = "lxml-6.0.2-cp38-cp38-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:af85529ae8d2a453feee4c780d9406a5e3b17cee0dd75c18bd31adcd584debc3"},
{file = "lxml-6.0.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fe659f6b5d10fb5a17f00a50eb903eb277a71ee35df4615db573c069bcf967ac"},
{file = "lxml-6.0.2-cp38-cp38-win32.whl", hash = "sha256:5921d924aa5468c939d95c9814fa9f9b5935a6ff4e679e26aaf2951f74043512"},
{file = "lxml-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:0aa7070978f893954008ab73bb9e3c24a7c56c054e00566a21b553dc18105fca"},
{file = "lxml-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2c8458c2cdd29589a8367c09c8f030f1d202be673f0ca224ec18590b3b9fb694"},
{file = "lxml-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3fee0851639d06276e6b387f1c190eb9d7f06f7f53514e966b26bae46481ec90"},
{file = "lxml-6.0.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2142a376b40b6736dfc214fd2902409e9e3857eff554fed2d3c60f097e62a62"},
{file = "lxml-6.0.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6b5b39cc7e2998f968f05309e666103b53e2edd01df8dc51b90d734c0825444"},
{file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4aec24d6b72ee457ec665344a29acb2d35937d5192faebe429ea02633151aad"},
{file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:b42f4d86b451c2f9d06ffb4f8bbc776e04df3ba070b9fe2657804b1b40277c48"},
{file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cdaefac66e8b8f30e37a9b4768a391e1f8a16a7526d5bc77a7928408ef68e93"},
{file = "lxml-6.0.2-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:b738f7e648735714bbb82bdfd030203360cfeab7f6e8a34772b3c8c8b820568c"},
{file = "lxml-6.0.2-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:daf42de090d59db025af61ce6bdb2521f0f102ea0e6ea310f13c17610a97da4c"},
{file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:66328dabea70b5ba7e53d94aa774b733cf66686535f3bc9250a7aab53a91caaf"},
{file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:e237b807d68a61fc3b1e845407e27e5eb8ef69bc93fe8505337c1acb4ee300b6"},
{file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ac02dc29fd397608f8eb15ac1610ae2f2f0154b03f631e6d724d9e2ad4ee2c84"},
{file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:817ef43a0c0b4a77bd166dc9a09a555394105ff3374777ad41f453526e37f9cb"},
{file = "lxml-6.0.2-cp39-cp39-win32.whl", hash = "sha256:bc532422ff26b304cfb62b328826bd995c96154ffd2bac4544f37dbb95ecaa8f"},
{file = "lxml-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:995e783eb0374c120f528f807443ad5a83a656a8624c467ea73781fc5f8a8304"},
{file = "lxml-6.0.2-cp39-cp39-win_arm64.whl", hash = "sha256:08b9d5e803c2e4725ae9e8559ee880e5328ed61aa0935244e0515d7d9dbec0aa"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d"},
{file = "lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a"},
{file = "lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e"},
{file = "lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62"},
]

[package.extras]
@@ -1243,7 +1301,7 @@ description = "An LDAP3 auth provider for Synapse"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
{file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
@@ -1482,7 +1540,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
@@ -1531,14 +1589,14 @@ files = [

[[package]]
name = "phonenumbers"
version = "9.0.11"
version = "9.0.15"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "phonenumbers-9.0.11-py2.py3-none-any.whl", hash = "sha256:a8ebb2136f1f14dfdbadb98be01cb71b96f880dea011eb5e0921967fe3a23abf"},
{file = "phonenumbers-9.0.11.tar.gz", hash = "sha256:6573858dcf0a7a2753a071375e154d9fc11791546c699b575af95d2ba7d84a1d"},
{file = "phonenumbers-9.0.15-py2.py3-none-any.whl", hash = "sha256:269b73bc05258e8fd57582770b9559307099ea677c8f1dc5272476f661344776"},
{file = "phonenumbers-9.0.15.tar.gz", hash = "sha256:345ff7f23768332d866f37732f815cdf1d33c7f0961246562a5c5b78c12c3ff3"},
]

[[package]]
@@ -1551,8 +1609,6 @@ groups = ["main"]
files = [
{file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
{file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
{file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
@@ -1562,8 +1618,6 @@ files = [
{file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
{file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
{file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
{file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
@@ -1573,8 +1627,6 @@ files = [
{file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
{file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
{file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
{file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
@@ -1587,8 +1639,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
{file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
{file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
{file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
@@ -1598,8 +1648,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
{file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
@@ -1609,8 +1657,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
{file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
{file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
{file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
@@ -1620,8 +1666,6 @@ files = [
{file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
{file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
@@ -1631,8 +1675,6 @@ files = [
{file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
{file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
{file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
{file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
@@ -1642,15 +1684,11 @@ files = [
{file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
@@ -1668,14 +1706,14 @@ xmp = ["defusedxml"]

[[package]]
name = "prometheus-client"
version = "0.22.1"
version = "0.23.1"
description = "Python client for the Prometheus monitoring system."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094"},
{file = "prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28"},
{file = "prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99"},
{file = "prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce"},
]

[package.extras]
@@ -1688,7 +1726,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"postgres\""
markers = "extra == \"postgres\" or extra == \"all\""
files = [
{file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
{file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},
@@ -1696,7 +1734,6 @@ files = [
{file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"},
{file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"},
{file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"},
{file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"},
{file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"},
{file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"},
{file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},
@@ -1709,7 +1746,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
]
@@ -1725,7 +1762,7 @@ description = "A Simple library to enable psycopg2 compatability"
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
]
@@ -1774,14 +1811,14 @@ files = [

[[package]]
name = "pydantic"
version = "2.11.7"
version = "2.11.10"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
{file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
{file = "pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a"},
{file = "pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423"},
]

[package.dependencies]
@@ -1908,14 +1945,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pygithub"
version = "2.7.0"
version = "2.8.1"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700"},
{file = "pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb"},
{file = "pygithub-2.8.1-py3-none-any.whl", hash = "sha256:23a0a5bca93baef082e03411bf0ce27204c32be8bfa7abc92fe4a3e132936df0"},
{file = "pygithub-2.8.1.tar.gz", hash = "sha256:341b7c78521cb07324ff670afd1baa2bf5c286f8d9fd302c1798ba594a5400c9"},
]

[package.dependencies]
@@ -1984,7 +2021,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"cache-memory\""
markers = "extra == \"cache-memory\" or extra == \"all\""
files = [
{file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
{file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
@@ -2044,7 +2081,7 @@ description = "Python implementation of SAML Version 2 Standard"
optional = true
python-versions = ">=3.9,<4.0"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
{file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
@@ -2069,7 +2106,7 @@ description = "Extensions to the standard Python datetime module"
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -2097,7 +2134,7 @@ description = "World timezone definitions, modern and historical"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
@@ -2396,30 +2433,31 @@ files = [

[[package]]
name = "ruff"
version = "0.12.7"
version = "0.12.10"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303"},
{file = "ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb"},
{file = "ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4"},
{file = "ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77"},
{file = "ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f"},
{file = "ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69"},
{file = "ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71"},
{file = "ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b"},
{file = "ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1"},
{file = "ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b"},
{file = "ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9"},
{file = "ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a"},
{file = "ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60"},
{file = "ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56"},
{file = "ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9"},
{file = "ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b"},
{file = "ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266"},
{file = "ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e"},
{file = "ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc"},
{file = "ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9"},
]

[[package]]
@@ -2462,7 +2500,7 @@ description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"sentry\""
markers = "extra == \"sentry\" or extra == \"all\""
files = [
{file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"},
{file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"},
@@ -2561,14 +2599,14 @@ type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.deve

[[package]]
name = "setuptools-rust"
version = "1.11.1"
version = "1.12.0"
description = "Setuptools Rust extension plugin"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "setuptools_rust-1.11.1-py3-none-any.whl", hash = "sha256:5eaaddaed268dc24a527ffa659ce56b22d3cf17b781247b779efd611031fe8ea"},
{file = "setuptools_rust-1.11.1.tar.gz", hash = "sha256:7dabc4392252ced314b8050d63276e05fdc5d32398fc7d3cce1f6a6ac35b76c0"},
{file = "setuptools_rust-1.12.0-py3-none-any.whl", hash = "sha256:7e7db90547f224a835b45f5ad90c983340828a345554a9a660bdb2de8605dcdd"},
{file = "setuptools_rust-1.12.0.tar.gz", hash = "sha256:d94a93f0c97751c17014565f07bdc324bee45d396cd1bba83d8e7af92b945f0c"},
]

[package.dependencies]
@@ -2650,7 +2688,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
@@ -2666,7 +2704,7 @@ description = "Python bindings for the Apache Thrift RPC system"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
]
@@ -2728,7 +2766,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
{file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
@@ -2746,14 +2784,14 @@ files = [

[[package]]
name = "towncrier"
version = "24.8.0"
version = "25.8.0"
description = "Building newsfiles for your project."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "towncrier-24.8.0-py3-none-any.whl", hash = "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d"},
{file = "towncrier-24.8.0.tar.gz", hash = "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3"},
{file = "towncrier-25.8.0-py3-none-any.whl", hash = "sha256:b953d133d98f9aeae9084b56a3563fd2519dfc6ec33f61c9cd2c61ff243fb513"},
{file = "towncrier-25.8.0.tar.gz", hash = "sha256:eef16d29f831ad57abb3ae32a0565739866219f1ebfbdd297d32894eb9940eb1"},
]

[package.dependencies]
@@ -2793,20 +2831,20 @@ docs = ["sphinx", "sphinx-rtd-theme"]

[[package]]
name = "twine"
version = "6.1.0"
version = "6.2.0"
description = "Collection of utilities for publishing packages on PyPI"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "twine-6.1.0-py3-none-any.whl", hash = "sha256:a47f973caf122930bf0fbbf17f80b83bc1602c9ce393c7845f289a3001dc5384"},
{file = "twine-6.1.0.tar.gz", hash = "sha256:be324f6272eff91d07ee93f251edf232fc647935dd585ac003539b42404a8dbd"},
{file = "twine-6.2.0-py3-none-any.whl", hash = "sha256:418ebf08ccda9a8caaebe414433b0ba5e25eb5e4a927667122fbe8f829f985d8"},
{file = "twine-6.2.0.tar.gz", hash = "sha256:e5ed0d2fd70c9959770dce51c8f39c8945c574e18173a7b81802dab51b4b75cf"},
]

[package.dependencies]
id = "*"
importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
keyring = {version = ">=15.1", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""}
keyring = {version = ">=21.2.0", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""}
packaging = ">=24.0"
readme-renderer = ">=35.0"
requests = ">=2.20"
@@ -2816,7 +2854,7 @@ rich = ">=12.0.0"
urllib3 = ">=1.26.0"

[package.extras]
keyring = ["keyring (>=15.1)"]
keyring = ["keyring (>=21.2.0)"]

[[package]]
name = "twisted"
@@ -2865,7 +2903,7 @@ description = "non-blocking redis client for python"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
{file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
@@ -2877,14 +2915,14 @@ twisted = "*"

[[package]]
name = "types-bleach"
version = "6.2.0.20250514"
version = "6.2.0.20250809"
description = "Typing stubs for bleach"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_bleach-6.2.0.20250514-py3-none-any.whl", hash = "sha256:380cb74f0db1e3c3b2e0cde217221108e975e07e95ef0970c9d41f7cd4e8ea3c"},
{file = "types_bleach-6.2.0.20250514.tar.gz", hash = "sha256:38c2e51d9cac51dc70c1b66121a11f4dad8bbf47fbad494bb7a77d8b8f3c4323"},
{file = "types_bleach-6.2.0.20250809-py3-none-any.whl", hash = "sha256:0b372a75117947d9ac8a31ae733fd0f8d92ec75c4772e7b37093ba3fa5b48fb9"},
{file = "types_bleach-6.2.0.20250809.tar.gz", hash = "sha256:188d7a1119f6c953140b513ed57ba4213755695815472c19d0c22ac09c79b90b"},
]

[package.dependencies]
@@ -2919,14 +2957,14 @@ files = [

[[package]]
name = "types-jsonschema"
version = "4.25.0.20250720"
version = "4.25.1.20250822"
description = "Typing stubs for jsonschema"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_jsonschema-4.25.0.20250720-py3-none-any.whl", hash = "sha256:7d7897c715310d8bf9ae27a2cedba78bbb09e4cad83ce06d2aa79b73a88941df"},
{file = "types_jsonschema-4.25.0.20250720.tar.gz", hash = "sha256:765a3b6144798fe3161fd8cbe570a756ed3e8c0e5adb7c09693eb49faad39dbd"},
{file = "types_jsonschema-4.25.1.20250822-py3-none-any.whl", hash = "sha256:f82c2d7fa1ce1c0b84ba1de4ed6798469768188884db04e66421913a4e181294"},
{file = "types_jsonschema-4.25.1.20250822.tar.gz", hash = "sha256:aac69ed4b23f49aaceb7fcb834141d61b9e4e6a7f6008cb2f0d3b831dfa8464a"},
]

[package.dependencies]
@@ -2970,14 +3008,14 @@ files = [

[[package]]
name = "types-psycopg2"
version = "2.9.21.20250718"
version = "2.9.21.20250915"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_psycopg2-2.9.21.20250718-py3-none-any.whl", hash = "sha256:bcf085d4293bda48f5943a46dadf0389b2f98f7e8007722f7e1c12ee0f541858"},
{file = "types_psycopg2-2.9.21.20250718.tar.gz", hash = "sha256:dc09a97272ef67e739e57b9f4740b761208f4514257e311c0b05c8c7a37d04b4"},
{file = "types_psycopg2-2.9.21.20250915-py3-none-any.whl", hash = "sha256:eefe5ccdc693fc086146e84c9ba437bb278efe1ef330b299a0cb71169dc6c55f"},
{file = "types_psycopg2-2.9.21.20250915.tar.gz", hash = "sha256:bfeb8f54c32490e7b5edc46215ab4163693192bc90407b4a023822de9239f5c8"},
]

[[package]]
@@ -2998,26 +3036,26 @@ types-cffi = "*"

[[package]]
name = "types-pyyaml"
version = "6.0.12.20250809"
version = "6.0.12.20250915"
description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_pyyaml-6.0.12.20250809-py3-none-any.whl", hash = "sha256:032b6003b798e7de1a1ddfeefee32fac6486bdfe4845e0ae0e7fb3ee4512b52f"},
{file = "types_pyyaml-6.0.12.20250809.tar.gz", hash = "sha256:af4a1aca028f18e75297da2ee0da465f799627370d74073e96fee876524f61b5"},
{file = "types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6"},
{file = "types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3"},
]

[[package]]
name = "types-requests"
version = "2.32.4.20250611"
version = "2.32.4.20250913"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072"},
{file = "types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826"},
{file = "types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1"},
{file = "types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d"},
]

[package.dependencies]
@@ -3025,26 +3063,26 @@ urllib3 = ">=2"

[[package]]
name = "types-setuptools"
version = "80.9.0.20250809"
version = "80.9.0.20250822"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_setuptools-80.9.0.20250809-py3-none-any.whl", hash = "sha256:7c6539b4c7ac7b4ab4db2be66d8a58fb1e28affa3ee3834be48acafd94f5976a"},
{file = "types_setuptools-80.9.0.20250809.tar.gz", hash = "sha256:e986ba37ffde364073d76189e1d79d9928fb6f5278c7d07589cde353d0218864"},
{file = "types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3"},
{file = "types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965"},
]

[[package]]
name = "typing-extensions"
version = "4.14.1"
version = "4.15.0"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
{file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
{file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
]

[[package]]
@@ -3111,7 +3149,7 @@ description = "An XML Schema validator and decoder"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
{file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
@@ -3255,4 +3293,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = "^3.9.0"
content-hash = "600a349d08dde732df251583094a121b5385eb43ae0c6ceff10dcf9749359446"
content-hash = "0058b93ca13a3f2a0cfc28485ddd8202c42d0015dbaf3b9692e43f37fe2a0be6"

@@ -101,10 +101,10 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.137.0rc1"
version = "1.140.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
readme = "README.rst"
repository = "https://github.com/element-hq/synapse"
packages = [
@@ -324,7 +324,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.7"
ruff = "0.12.10"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"

@@ -356,7 +356,7 @@ click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
markdown-it-py = ">=3.0.0"
pygithub = ">=1.55"
pygithub = ">=1.59"
# The following are executed as commands by the release script.
twine = "*"
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.

@@ -12,7 +12,7 @@
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use std::{collections::HashMap, future::Future};
use std::{collections::HashMap, future::Future, sync::OnceLock};

use anyhow::Context;
use futures::TryStreamExt;
@@ -299,5 +299,22 @@ where
        });
    });

    Ok(deferred)
    // Make the deferred follow the Synapse logcontext rules
    make_deferred_yieldable(py, &deferred)
}

static MAKE_DEFERRED_YIELDABLE: OnceLock<pyo3::Py<pyo3::PyAny>> = OnceLock::new();

/// Given a deferred, make it follow the Synapse logcontext rules
fn make_deferred_yieldable<'py>(
    py: Python<'py>,
    deferred: &Bound<'py, PyAny>,
) -> PyResult<Bound<'py, PyAny>> {
    let make_deferred_yieldable = MAKE_DEFERRED_YIELDABLE.get_or_init(|| {
        let sys = PyModule::import(py, "synapse.logging.context").unwrap();
        let func = sys.getattr("make_deferred_yieldable").unwrap().unbind();
        func
    });

    make_deferred_yieldable.call1(py, (deferred,))?.extract(py)
}
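
For context on the hunk above: the `OnceLock` static means the import of `synapse.logging.context` and the `make_deferred_yieldable` attribute lookup run at most once per process, and every later call reuses the cached handle. A minimal standalone sketch of the same caching pattern (illustrative only; the module path and pyo3 calls mirror the hunk, while `CACHED` and `cached_callable` are made-up names):

use std::sync::OnceLock;
use pyo3::prelude::*;

// Cached handle to a Python callable, resolved at most once per process.
static CACHED: OnceLock<Py<PyAny>> = OnceLock::new();

fn cached_callable(py: Python<'_>) -> &'static Py<PyAny> {
    CACHED.get_or_init(|| {
        // Only runs on the first call; the `expect`s mirror the hunk's `unwrap`s.
        PyModule::import(py, "synapse.logging.context")
            .expect("module should import")
            .getattr("make_deferred_yieldable")
            .expect("attribute should exist")
            .unbind()
    })
}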

@@ -289,10 +289,10 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
    default_enabled: true,
}];

pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.unsubscribed_thread"),
        priority_class: 1,
        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.unsubscribed_thread"),
        priority_class: 6,
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::Msc4306ThreadSubscription { subscribed: false },
        )]),
@@ -301,8 +301,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.subscribed_thread"),
        priority_class: 1,
        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.subscribed_thread"),
        priority_class: 6,
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::Msc4306ThreadSubscription { subscribed: true },
        )]),
@@ -310,6 +310,9 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        default: true,
        default_enabled: true,
    },
];

pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
        priority_class: 1,
@@ -726,6 +729,7 @@ lazy_static! {
.iter()
.chain(BASE_APPEND_OVERRIDE_RULES.iter())
.chain(BASE_APPEND_CONTENT_RULES.iter())
.chain(BASE_APPEND_POSTCONTENT_RULES.iter())
.chain(BASE_APPEND_UNDERRIDE_RULES.iter())
.map(|rule| { (&*rule.rule_id, rule) })
.collect();
|
||||
|
||||
@@ -527,6 +527,7 @@ impl PushRules {
|
||||
.chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter())
|
||||
.chain(self.content.iter())
|
||||
.chain(base_rules::BASE_APPEND_CONTENT_RULES.iter())
|
||||
.chain(base_rules::BASE_APPEND_POSTCONTENT_RULES.iter())
|
||||
.chain(self.room.iter())
|
||||
.chain(self.sender.iter())
|
||||
.chain(self.underride.iter())
|
||||
|
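The two `.chain(...)` pipelines above are what determine evaluation order; the new `postcontent` class slots in between the content and room rules. A rough sketch of the merged iteration order they produce (the class names are taken directly from the chained constants above):

    # Merged iteration order of the rule sets, as chained above:
    EVALUATION_ORDER = [
        "override",
        "content",
        "postcontent",  # new: now holds the MSC4306 thread-subscription rules
        "room",
        "sender",
        "underride",
    ]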
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.137/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.140/synapse-config.schema.json
type: object
properties:
  modules:
@@ -2259,9 +2259,8 @@ properties:
      Setting this to a high value allows users to report content quickly, possibly in
      duplicate. This can result in higher database usage.
    default:
      per_user:
        per_second: 1.0
        burst_count: 5.0
      per_second: 1.0
      burst_count: 5.0
    examples:
      - per_second: 2.0
        burst_count: 20.0
@@ -2270,9 +2269,8 @@ properties:
    description: >-
      Sets rate limits for how often users are able to create rooms.
    default:
      per_user:
        per_second: 0.016
        burst_count: 10.0
      per_second: 0.016
      burst_count: 10.0
    examples:
      - per_second: 1.0
        burst_count: 5.0
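The defaults above follow Synapse's usual `per_second`/`burst_count` ratelimit shape: a bucket of `burst_count` actions that refills at `per_second`. A minimal sketch of that semantics (not Synapse's actual `Ratelimiter` implementation):

    import time

    class TokenBucket:
        """Allow `burst_count` actions at once, refilling at `per_second`."""

        def __init__(self, per_second: float, burst_count: float) -> None:
            self.per_second = per_second
            self.burst_count = burst_count
            self.tokens = burst_count
            self.last = time.monotonic()

        def allow(self) -> bool:
            now = time.monotonic()
            # Refill based on elapsed time, capped at the burst size.
            self.tokens = min(
                self.burst_count, self.tokens + (now - self.last) * self.per_second
            )
            self.last = now
            if self.tokens >= 1:
                self.tokens -= 1
                return True
            return False

    # e.g. the room-creation default above: 0.016/s with a burst of 10.
    limiter = TokenBucket(per_second=0.016, burst_count=10.0)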
@@ -2415,8 +2413,15 @@ properties:
      A list of media upload limits defining how much data a given user can
      upload in a given time period.

      These limits are applied in addition to the `max_upload_size` limit above
      (which applies to individual uploads).

      An empty list means no limits are applied.

      These settings can be overridden using the `get_media_upload_limits_for_user`
      module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).
    default: []
    items:
      time_period:
@@ -2879,6 +2884,35 @@ properties:
        default: true
    examples:
      - false
  matrix_rtc:
    type: object
    description: >-
      Options related to MatrixRTC.
    properties:
      transports:
        type: array
        items:
          type: object
          required:
            - type
          properties:
            type:
              type: string
              description: The type of transport to use to connect to the selective forwarding unit (SFU).
              example: livekit
            livekit_service_url:
              type: string
              description: >-
                The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
              example: https://matrix-rtc.example.com/livekit/jwt
        description:
          A list of transport types and arguments to use for MatrixRTC connections.
        default: []
    default: {}
    examples:
      - transports:
          - type: livekit
            livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
  enable_registration:
    type: boolean
    description: >-
@@ -32,7 +32,7 @@ DISTS = (
    "debian:sid",  # (rolling distro, no EOL)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
    "ubuntu:noble",  # 24.04 LTS (EOL 2029-06)
    "ubuntu:oracular",  # 24.10 (EOL 2025-07)
    "ubuntu:plucky",  # 25.04 (EOL 2026-01)
    "debian:trixie",  # (EOL not specified yet)
)


@@ -230,6 +230,7 @@ test_packages=(
    ./tests/msc3967
    ./tests/msc4140
    ./tests/msc4155
    ./tests/msc4306
)

# Enable dirty runs, so tests will reuse the same container where possible.
@@ -68,6 +68,42 @@ PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode(
    category="per-homeserver-tenant-metrics",
)

PREFER_SYNAPSE_CLOCK_CALL_LATER = ErrorCode(
    "call-later-not-tracked",
    "Prefer using `synapse.util.Clock.call_later` instead of `reactor.callLater`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_LOOPING_CALL = ErrorCode(
    "prefer-synapse-clock-looping-call",
    "Prefer using `synapse.util.Clock.looping_call` instead of `task.LoopingCall`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode(
    "prefer-synapse-clock-call-when-running",
    "Prefer using `synapse.util.Clock.call_when_running` instead of `reactor.callWhenRunning`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER = ErrorCode(
    "prefer-synapse-clock-add-system-event-trigger",
    "Prefer using `synapse.util.Clock.add_system_event_trigger` instead of `reactor.addSystemEventTrigger`",
    category="synapse-reactor-clock",
)

MULTIPLE_INTERNAL_CLOCKS_CREATED = ErrorCode(
    "multiple-internal-clocks",
    "Only one instance of `clock.Clock` should be created",
    category="synapse-reactor-clock",
)

UNTRACKED_BACKGROUND_PROCESS = ErrorCode(
    "untracked-background-process",
    "Prefer using `HomeServer.run_as_background_process` method over the bare `run_as_background_process`",
    category="synapse-tracked-calls",
)


class Sentinel(enum.Enum):
    # defining a sentinel in this way allows mypy to correctly handle the
@@ -210,6 +246,18 @@ class SynapsePlugin(Plugin):
            # callback, let's just pass it in while we have it.
            return lambda ctx: check_prometheus_metric_instantiation(ctx, fullname)

        if fullname == "twisted.internet.task.LoopingCall":
            return check_looping_call

        if fullname == "synapse.util.clock.Clock":
            return check_clock_creation

        if (
            fullname
            == "synapse.metrics.background_process_metrics.run_as_background_process"
        ):
            return check_background_process

        return None

    def get_method_signature_hook(
@@ -229,9 +277,177 @@ class SynapsePlugin(Plugin):
        ):
            return check_is_cacheable_wrapper

        if fullname in (
            "twisted.internet.interfaces.IReactorTime.callLater",
            "synapse.types.ISynapseThreadlessReactor.callLater",
            "synapse.types.ISynapseReactor.callLater",
        ):
            return check_call_later

        if fullname in (
            "twisted.internet.interfaces.IReactorCore.callWhenRunning",
            "synapse.types.ISynapseThreadlessReactor.callWhenRunning",
            "synapse.types.ISynapseReactor.callWhenRunning",
        ):
            return check_call_when_running

        if fullname in (
            "twisted.internet.interfaces.IReactorCore.addSystemEventTrigger",
            "synapse.types.ISynapseThreadlessReactor.addSystemEventTrigger",
            "synapse.types.ISynapseReactor.addSystemEventTrigger",
        ):
            return check_add_system_event_trigger

        return None


def check_clock_creation(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that the only `clock.Clock` instance is the one used by the `HomeServer`.
    This is so that the `HomeServer` can cancel any tracked delayed or looping calls
    during server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected the only `clock.Clock` instance to be the one used by the `HomeServer`. "
        "This is so that the `HomeServer` can cancel any tracked delayed or looping calls "
        "during server shutdown",
        ctx.context,
        code=MULTIPLE_INTERNAL_CLOCKS_CREATED,
    )

    return signature


def check_call_later(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.callLater` callsites aren't used.

    `synapse.util.Clock.call_later` should always be used instead of `reactor.callLater`.
    This is because `synapse.util.Clock` tracks delayed calls in order to cancel any
    outstanding calls during server shutdown. Delayed calls which are short lived
    (<~60s), or which are made frequently and can be tracked via other means, could be
    candidates for using `synapse.util.Clock.call_later` with
    `call_later_cancel_on_shutdown` set to `False`. There shouldn't be a need to use
    `reactor.callLater` outside of tests or the `Clock` class itself. If a need arises,
    you can use a type ignore comment to disable the check, e.g.
    `# type: ignore[call-later-not-tracked]`.

    Args:
        ctx: The `MethodSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected all `reactor.callLater` calls to use `synapse.util.Clock.call_later` "
        "instead. This is so that long lived calls can be tracked for cancellation during "
        "server shutdown",
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_CALL_LATER,
    )

    return signature

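In practice the check above pushes callers toward a pattern roughly like this (a sketch; `hs` and `reactor` are assumed to be in scope, `do_cleanup`/`send_ping` are hypothetical callbacks, and the `call_later_cancel_on_shutdown` keyword spelling is taken from the docstring above):

    # Preferred: the tracked clock, cancelled automatically at shutdown.
    clock = hs.get_clock()
    clock.call_later(30.0, do_cleanup)

    # Opt-out for a short-lived timer, per the docstring above:
    clock.call_later(5.0, send_ping, call_later_cancel_on_shutdown=False)

    # Discouraged: an untracked reactor timer; the plugin flags this unless
    # silenced with the error code it registers.
    reactor.callLater(30.0, do_cleanup)  # type: ignore[call-later-not-tracked]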
def check_looping_call(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that `task.LoopingCall` callsites aren't used.

    `synapse.util.Clock.looping_call` should always be used instead of `task.LoopingCall`.
    `synapse.util.Clock` tracks looping calls in order to cancel any outstanding calls
    during server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected all `task.LoopingCall` instances to use `synapse.util.Clock.looping_call` "
        "instead. This is so that long lived calls can be tracked for cancellation during "
        "server shutdown",
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_LOOPING_CALL,
    )

    return signature


def check_call_when_running(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.callWhenRunning` callsites aren't used.

    `synapse.util.Clock.call_when_running` should always be used instead of
    `reactor.callWhenRunning`.

    Since `reactor.callWhenRunning` is a reactor callback, the callback will start out
    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
    want to know which server the logs came from.

    Args:
        ctx: The `MethodSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        (
            "Expected all `reactor.callWhenRunning` calls to use `synapse.util.Clock.call_when_running` instead. "
            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
        ),
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING,
    )

    return signature


def check_add_system_event_trigger(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.addSystemEventTrigger` callsites aren't used.

    `synapse.util.Clock.add_system_event_trigger` should always be used instead of
    `reactor.addSystemEventTrigger`.

    Since `reactor.addSystemEventTrigger` is a reactor callback, the callback will start
    out with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as
    we want to know which server the logs came from.

    Args:
        ctx: The `MethodSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        (
            "Expected all `reactor.addSystemEventTrigger` calls to use `synapse.util.Clock.add_system_event_trigger` instead. "
            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
        ),
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER,
    )

    return signature


def check_background_process(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that calls to `run_as_background_process` use the `HomeServer` method.
    This is so that the `HomeServer` can cancel any running background processes during
    server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Prefer using `HomeServer.run_as_background_process` method over the bare "
        "`run_as_background_process`. This is so that the `HomeServer` can cancel "
        "any background processes during server shutdown",
        ctx.context,
        code=UNTRACKED_BACKGROUND_PROCESS,
    )

    return signature


def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None:
    """
    Cross-check the list of Prometheus metric classes against the
@@ -37,6 +37,8 @@ from typing import Any, List, Match, Optional, Union
import attr
import click
import git
import github
import github.Auth
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
@@ -397,7 +399,7 @@ def _tag(gh_token: Optional[str]) -> None:
        return

    # Create a new draft release
    gh = Github(gh_token)
    gh = Github(auth=github.Auth.Token(token=gh_token))
    gh_repo = gh.get_repo("element-hq/synapse")
    release = gh_repo.create_git_release(
        tag=tag_name,
@@ -639,7 +641,16 @@ def _notify(message: str) -> None:


@cli.command()
def merge_back() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def merge_back(_gh_token: Optional[str]) -> None:
    _merge_back()


@@ -687,7 +698,16 @@ def _merge_back() -> None:


@cli.command()
def announce() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def announce(_gh_token: Optional[str]) -> None:
    _announce()


@@ -814,7 +834,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
        return

    try:
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(gh_token))

        # We need to lookup name to trigger a request.
        _name = gh.get_user().name
@@ -30,7 +30,7 @@ from signedjson.sign import sign_json

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.util import json_encoder
from synapse.util.json import json_encoder


def main() -> None:

@@ -153,9 +153,18 @@ def get_registered_paths_for_default(
    """

    hs = MockHomeserver(base_config, worker_app)

    # TODO We only do this to avoid an error, but don't need the database etc
    hs.setup()
    return get_registered_paths_for_hs(hs)
    registered_paths = get_registered_paths_for_hs(hs)
    # NOTE: a more robust implementation would properly shutdown/cleanup each server
    # to avoid resource buildup.
    # However, the call to `shutdown` is `async` so it would require additional complexity here.
    # We are intentionally skipping this cleanup because this is a short-lived, one-off
    # utility script where the simpler approach is sufficient and we shouldn't run into
    # any resource buildup issues.

    return registered_paths


def elide_http_methods_if_unconflicting(

@@ -73,8 +73,18 @@ def main() -> None:

    pw = unicodedata.normalize("NFKC", password)

    bytes_to_hash = pw.encode("utf8") + password_pepper.encode("utf8")
    if len(bytes_to_hash) > 72:
        # bcrypt only looks at the first 72 bytes
        print(
            f"Password is too long ({len(bytes_to_hash)} bytes); truncating to 72 bytes for bcrypt. "
            "This is expected behaviour and will not affect a user's ability to log in. 72 bytes is "
            "sufficient entropy for a password."
        )
        bytes_to_hash = bytes_to_hash[:72]

    hashed = bcrypt.hashpw(
        pw.encode("utf8") + password_pepper.encode("utf8"),
        bytes_to_hash,
        bcrypt.gensalt(bcrypt_rounds),
    ).decode("ascii")
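A self-contained sketch of the same truncate-then-hash pattern (the password, pepper, and round count here are placeholders; Synapse reads the real values from config and CLI flags):

    import unicodedata

    import bcrypt

    password = "correct horse battery staple"
    pepper = ""  # placeholder; Synapse reads this from config

    pw = unicodedata.normalize("NFKC", password)
    to_hash = (pw.encode("utf8") + pepper.encode("utf8"))[:72]  # bcrypt's limit

    hashed = bcrypt.hashpw(to_hash, bcrypt.gensalt(12)).decode("ascii")

    # Verification applies the same truncation, so longer passwords still match.
    assert bcrypt.checkpw(to_hash, hashed.encode("ascii"))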
@@ -54,11 +54,11 @@ from twisted.internet import defer, reactor as reactor_
from synapse.config.database import DatabaseConnectionConfig
from synapse.config.homeserver import HomeServerConfig
from synapse.logging.context import (
    LoggingContext,
    make_deferred_yieldable,
    run_in_background,
)
from synapse.notifier import ReplicationNotifier
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.databases.main import FilteringWorkerStore
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
@@ -98,7 +98,6 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION, Clock

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -317,27 +316,15 @@ class Store(
    )


class MockHomeserver:
class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore

    def __init__(self, config: HomeServerConfig):
        self.clock = Clock(reactor)
        self.config = config
        self.hostname = config.server.server_name
        self.version_string = SYNAPSE_VERSION

    def get_clock(self) -> Clock:
        return self.clock

    def get_reactor(self) -> ISynapseReactor:
        return reactor

    def get_instance_name(self) -> str:
        return "master"

    def should_send_federation(self) -> bool:
        return False

    def get_replication_notifier(self) -> ReplicationNotifier:
        return ReplicationNotifier()
        super().__init__(
            hostname=config.server.server_name,
            config=config,
            reactor=reactor,
        )


class Porter:
@@ -346,12 +333,12 @@ class Porter:
        sqlite_config: Dict[str, Any],
        progress: "Progress",
        batch_size: int,
        hs_config: HomeServerConfig,
        hs: HomeServer,
    ):
        self.sqlite_config = sqlite_config
        self.progress = progress
        self.batch_size = batch_size
        self.hs_config = hs_config
        self.hs = hs

    async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
        if table in APPEND_ONLY_TABLES:
@@ -671,8 +658,7 @@ class Porter:

        engine = create_engine(db_config.config)

        hs = MockHomeserver(self.hs_config)
        server_name = hs.hostname
        server_name = self.hs.hostname

        with make_conn(
            db_config=db_config,
@@ -683,9 +669,17 @@ class Porter:
            engine.check_database(
                db_conn, allow_outdated_version=allow_outdated_version
            )
            prepare_database(db_conn, engine, config=self.hs_config)
            prepare_database(db_conn, engine, config=self.hs.config)
            # Type safety: ignore that we're using Mock homeservers here.
            store = Store(DatabasePool(hs, db_config, engine), db_conn, hs)  # type: ignore[arg-type]
            store = Store(
                DatabasePool(
                    self.hs,
                    db_config,
                    engine,
                ),
                db_conn,
                self.hs,
            )
            db_conn.commit()

        return store
@@ -782,7 +776,7 @@ class Porter:
            return

        self.postgres_store = self.build_db_store(
            self.hs_config.database.get_single_database()
            self.hs.config.database.get_single_database()
        )

        await self.remove_ignored_background_updates_from_database()
@@ -1571,6 +1565,8 @@ def main() -> None:
    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    hs = MockHomeserver(config)

    def start(stdscr: Optional["curses.window"] = None) -> None:
        progress: Progress
        if stdscr:
@@ -1582,15 +1578,14 @@ def main() -> None:
            sqlite_config=sqlite_config,
            progress=progress,
            batch_size=args.batch_size,
            hs_config=config,
            hs=hs,
        )

        @defer.inlineCallbacks
        def run() -> Generator["defer.Deferred[Any]", Any, None]:
            with LoggingContext("synapse_port_db_run"):
                yield defer.ensureDeferred(porter.run())
            yield defer.ensureDeferred(porter.run())

        reactor.callWhenRunning(run)
        hs.get_clock().call_when_running(run)

    reactor.run()
@@ -28,11 +28,9 @@ import yaml
from twisted.internet import defer, reactor as reactor_

from synapse.config.homeserver import HomeServerConfig
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -48,12 +46,10 @@ class MockHomeserver(HomeServer):
            hostname=config.server.server_name,
            config=config,
            reactor=reactor,
            version_string=f"Synapse/{SYNAPSE_VERSION}",
        )


def run_background_updates(hs: HomeServer) -> None:
    server_name = hs.hostname
    main = hs.get_datastores().main
    state = hs.get_datastores().state

@@ -67,14 +63,13 @@ def run_background_updates(hs: HomeServer) -> None:
    def run() -> None:
        # Apply all background updates on the database.
        defer.ensureDeferred(
            run_as_background_process(
            hs.run_as_background_process(
                "background_updates",
                server_name,
                run_background_updates,
            )
        )

    reactor.callWhenRunning(run)
    hs.get_clock().call_when_running(run)

    reactor.run()

@@ -120,6 +115,13 @@ def main() -> None:
    # DB.
    hs.setup()

    # This will cause all of the relevant storage classes to be instantiated and call
    # `register_background_update_handler(...)`,
    # `register_background_index_update(...)`,
    # `register_background_validate_constraint(...)`, etc so they are available to use
    # if we are asked to run those background updates.
    hs.get_storage_controllers()

    if args.run_background_updates:
        run_background_updates(hs)
@@ -302,12 +302,9 @@ class BaseAuth:
          (the user_id URI parameter allows an application service to masquerade
          any applicable user in its namespace)
        - what device the application service should be treated as controlling
          (the device_id[^1] URI parameter allows an application service to masquerade
          (the device_id URI parameter allows an application service to masquerade
          as any device that exists for the relevant user)

        [^1] Unstable and provided by MSC3202.
             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.

        Returns:
            the application service `Requester` of that request

@@ -319,7 +316,8 @@ class BaseAuth:
        - The returned device ID, if present, has been checked to be a valid device ID
          for the returned user ID.
        """
        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"
        # TODO: We can drop unstable support after 2026-01-01 (couple months after stable support)
        UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

        app_service = self.store.get_app_service_by_token(access_token)
        if app_service is None:
@@ -341,26 +339,24 @@ class BaseAuth:
        else:
            effective_user_id = app_service.sender

        effective_device_id: Optional[str] = None

        if (
            self.hs.config.experimental.msc3202_device_masquerading_enabled
            and DEVICE_ID_ARG_NAME in request.args
        ):
            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
            effective_device_id_args = request.args.get(
                b"device_id", request.args.get(UNSTABLE_DEVICE_ID_ARG_NAME)
            )
            if effective_device_id_args:
                effective_device_id = effective_device_id_args[0].decode("utf8")
                # We only just set this so it can't be None!
                assert effective_device_id is not None
                device_opt = await self.store.get_device(
                    effective_user_id, effective_device_id
                )
                if device_opt is None:
                    # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
                    # This is an open thread of discussion on MSC3202 as of 2021-12-09.
                    raise AuthError(
                        400,
                        f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
                        Codes.EXCLUSIVE,
                        Codes.UNKNOWN_DEVICE,
                    )
        else:
            effective_device_id = None

        return create_requester(
            effective_user_id, app_service=app_service, device_id=effective_device_id
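The lookup above prefers the stable `device_id` parameter and falls back to the unstable MSC3202 name. The same pattern in isolation (with a hypothetical parsed query dict standing in for `request.args`):

    from typing import Dict, List, Optional

    UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

    def pick_device_id(args: Dict[bytes, List[bytes]]) -> Optional[str]:
        # Prefer the stable parameter name, fall back to the unstable one.
        values = args.get(b"device_id", args.get(UNSTABLE_DEVICE_ID_ARG_NAME))
        return values[0].decode("utf8") if values else None

    assert pick_device_id({b"device_id": [b"DEV1"]}) == "DEV1"
    assert pick_device_id({UNSTABLE_DEVICE_ID_ARG_NAME: [b"DEV2"]}) == "DEV2"
    assert pick_device_id({}) is None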
@@ -13,7 +13,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING, Optional, Set
from urllib.parse import urlencode

from synapse._pydantic_compat import (
@@ -33,7 +33,6 @@ from synapse.api.errors import (
    UnrecognizedRequestError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import (
    active_span,
    force_tracing,
@@ -43,9 +42,9 @@ from synapse.logging.opentracing import (
from synapse.metrics import SERVER_NAME_LABEL
from synapse.synapse_rust.http_client import HttpClient
from synapse.types import JsonDict, Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
from synapse.util.json import json_decoder

from . import introspection_response_timer

@@ -57,8 +56,10 @@ logger = logging.getLogger(__name__)

# Scope as defined by MSC2967
# https://github.com/matrix-org/matrix-spec-proposals/pull/2967
SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
UNSTABLE_SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
STABLE_SCOPE_MATRIX_API = "urn:matrix:client:api:*"
STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"


class ServerMetadata(BaseModel):
@@ -227,13 +228,12 @@ class MasDelegatedAuth(BaseAuth):
        try:
            with start_active_span("mas-introspect-token"):
                inject_request_headers(raw_headers)
                with PreserveLoggingContext():
                    resp_body = await self._rust_http_client.post(
                        url=self._introspection_endpoint,
                        response_limit=1 * 1024 * 1024,
                        headers=raw_headers,
                        request_body=body,
                    )
                resp_body = await self._rust_http_client.post(
                    url=self._introspection_endpoint,
                    response_limit=1 * 1024 * 1024,
                    headers=raw_headers,
                    request_body=body,
                )
        except HttpResponseException as e:
            end_time = self._clock.time()
            introspection_response_timer.labels(
@@ -334,7 +334,10 @@ class MasDelegatedAuth(BaseAuth):
        scope = introspection_result.get_scope_set()

        # Determine type of user based on presence of particular scopes
        if SCOPE_MATRIX_API not in scope:
        if (
            UNSTABLE_SCOPE_MATRIX_API not in scope
            and STABLE_SCOPE_MATRIX_API not in scope
        ):
            raise InvalidClientTokenError(
                "Token doesn't grant access to the Matrix C-S API"
            )
@@ -366,11 +369,12 @@ class MasDelegatedAuth(BaseAuth):
        # We only allow a single device_id in the scope, so we find them all in the
        # scope list, and raise if there are more than one. The OIDC server should be
        # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
        device_ids = [
            tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
            for tok in scope
            if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
        ]
        device_ids: Set[str] = set()
        for tok in scope:
            if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])
            elif tok.startswith(STABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                device_ids.add(tok[len(STABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])

        if len(device_ids) > 1:
            raise AuthError(
@@ -378,7 +382,7 @@ class MasDelegatedAuth(BaseAuth):
                "Multiple device IDs in scope",
            )

        device_id = device_ids[0] if device_ids else None
        device_id = next(iter(device_ids), None)

        if device_id is not None:
            # Sanity check the device_id
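Isolated, the stable/unstable scope handling above behaves like this (a sketch; the prefixes are the ones defined in the hunk, and the use of a set is presumably what lets a token carrying both spellings of the same device resolve cleanly):

    from typing import Optional, Set

    UNSTABLE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
    STABLE_PREFIX = "urn:matrix:client:device:"

    def single_device_id(scope: Set[str]) -> Optional[str]:
        device_ids: Set[str] = set()
        for tok in scope:
            if tok.startswith(UNSTABLE_PREFIX):
                device_ids.add(tok[len(UNSTABLE_PREFIX):])
            elif tok.startswith(STABLE_PREFIX):
                device_ids.add(tok[len(STABLE_PREFIX):])
        if len(device_ids) > 1:
            raise ValueError("Multiple device IDs in scope")
        return next(iter(device_ids), None)

    # Both spellings of the same device deduplicate to a single ID.
    assert single_device_id({UNSTABLE_PREFIX + "D1", STABLE_PREFIX + "D1"}) == "D1"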
@@ -20,7 +20,7 @@
#
import logging
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set
from urllib.parse import urlencode

from authlib.oauth2 import ClientAuth
@@ -34,12 +34,10 @@ from synapse.api.errors import (
    AuthError,
    HttpResponseException,
    InvalidClientTokenError,
    OAuthInsufficientScopeError,
    SynapseError,
    UnrecognizedRequestError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import (
    active_span,
    force_tracing,
@@ -49,9 +47,9 @@ from synapse.logging.opentracing import (
from synapse.metrics import SERVER_NAME_LABEL
from synapse.synapse_rust.http_client import HttpClient
from synapse.types import Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
from synapse.util.json import json_decoder

from . import introspection_response_timer

@@ -63,9 +61,10 @@ logger = logging.getLogger(__name__)

# Scope as defined by MSC2967
# https://github.com/matrix-org/matrix-spec-proposals/pull/2967
SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
SCOPE_MATRIX_GUEST = "urn:matrix:org.matrix.msc2967.client:api:guest"
SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
UNSTABLE_SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
STABLE_SCOPE_MATRIX_API = "urn:matrix:client:api:*"
STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"

# Scope which allows access to the Synapse admin API
SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*"
@@ -327,13 +326,12 @@ class MSC3861DelegatedAuth(BaseAuth):
        try:
            with start_active_span("mas-introspect-token"):
                inject_request_headers(raw_headers)
                with PreserveLoggingContext():
                    resp_body = await self._rust_http_client.post(
                        url=uri,
                        response_limit=1 * 1024 * 1024,
                        headers=raw_headers,
                        request_body=body,
                    )
                resp_body = await self._rust_http_client.post(
                    url=uri,
                    response_limit=1 * 1024 * 1024,
                    headers=raw_headers,
                    request_body=body,
                )
        except HttpResponseException as e:
            end_time = self._clock.time()
            introspection_response_timer.labels(
@@ -444,9 +442,6 @@ class MSC3861DelegatedAuth(BaseAuth):
        if not self._is_access_token_the_admin_token(access_token):
            await self._record_request(request, requester)

        if not allow_guest and requester.is_guest:
            raise OAuthInsufficientScopeError([SCOPE_MATRIX_API])

        request.requester = requester

        return requester
@@ -528,10 +523,11 @@ class MSC3861DelegatedAuth(BaseAuth):
        scope: List[str] = introspection_result.get_scope_list()

        # Determine type of user based on presence of particular scopes
        has_user_scope = SCOPE_MATRIX_API in scope
        has_guest_scope = SCOPE_MATRIX_GUEST in scope
        has_user_scope = (
            UNSTABLE_SCOPE_MATRIX_API in scope or STABLE_SCOPE_MATRIX_API in scope
        )

        if not has_user_scope and not has_guest_scope:
        if not has_user_scope:
            raise InvalidClientTokenError("No scope in token granting user rights")

        # Match via the sub claim
@@ -579,11 +575,12 @@ class MSC3861DelegatedAuth(BaseAuth):
        # We only allow a single device_id in the scope, so we find them all in the
        # scope list, and raise if there are more than one. The OIDC server should be
        # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
        device_ids = [
            tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
            for tok in scope
            if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
        ]
        device_ids: Set[str] = set()
        for tok in scope:
            if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])
            elif tok.startswith(STABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                device_ids.add(tok[len(STABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])

        if len(device_ids) > 1:
            raise AuthError(
@@ -591,7 +588,7 @@ class MSC3861DelegatedAuth(BaseAuth):
                "Multiple device IDs in scope",
            )

        device_id = device_ids[0] if device_ids else None
        device_id = next(iter(device_ids), None)

        if device_id is not None:
            # Sanity check the device_id
@@ -617,5 +614,4 @@ class MSC3861DelegatedAuth(BaseAuth):
            user_id=user_id,
            device_id=device_id,
            scope=scope,
            is_guest=(has_guest_scope and not has_user_scope),
        )
@@ -30,7 +30,7 @@ from typing import Any, Dict, List, Optional, Union

from twisted.web import http

from synapse.util import json_decoder
from synapse.util.json import json_decoder

if typing.TYPE_CHECKING:
    from synapse.config.homeserver import HomeServerConfig
@@ -140,12 +140,18 @@ class Codes(str, Enum):
    # Part of MSC4155
    INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED"

    # Part of MSC4190
    APPSERVICE_LOGIN_UNSUPPORTED = "IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED"

    # Part of MSC4306: Thread Subscriptions
    MSC4306_CONFLICTING_UNSUBSCRIPTION = (
        "IO.ELEMENT.MSC4306.M_CONFLICTING_UNSUBSCRIPTION"
    )
    MSC4306_NOT_IN_THREAD = "IO.ELEMENT.MSC4306.M_NOT_IN_THREAD"

    # Part of MSC4326
    UNKNOWN_DEVICE = "ORG.MATRIX.MSC4326.M_UNKNOWN_DEVICE"


class CodeMessageException(RuntimeError):
    """An exception with an integer code, a message string attribute, and optional headers.
@@ -26,7 +26,7 @@ from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RatelimitSettings
from synapse.storage.databases.main import DataStore
from synapse.types import Requester
from synapse.util import Clock
from synapse.util.clock import Clock

if TYPE_CHECKING:
    # To avoid circular imports:
@@ -22,6 +22,7 @@
"""Contains the URL paths to prefix various aspects of the server with."""

import hmac
import urllib.parse
from hashlib import sha256
from typing import Optional
from urllib.parse import urlencode, urljoin
@@ -96,11 +97,21 @@ class LoginSSORedirectURIBuilder:
        serialized_query_parameters = urlencode({"redirectUrl": client_redirect_url})

        if idp_id:
            # Since this is a user-controlled string, make it safe to include in a URL path.
            url_encoded_idp_id = urllib.parse.quote(
                idp_id,
                # Since this defaults to `safe="/"`, we have to override it. We're
                # working with an individual URL path parameter so there shouldn't be
                # any slashes in it which could change the request path.
                safe="",
                encoding="utf8",
            )

            resultant_url = urljoin(
                # We have to add a trailing slash to the base URL to ensure that the
                # last path segment is not stripped away when joining with another path.
                f"{base_url}/",
                f"{idp_id}?{serialized_query_parameters}",
                f"{url_encoded_idp_id}?{serialized_query_parameters}",
            )
        else:
            resultant_url = f"{base_url}?{serialized_query_parameters}"
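The two comments above describe real `urljoin`/`quote` pitfalls; this standalone snippet shows both behaviours (the URLs are illustrative):

    from urllib.parse import quote, urljoin

    # Without a trailing slash, urljoin replaces the last path segment...
    assert urljoin("https://hs.example/_synapse/client/pick_idp", "oidc") == \
        "https://hs.example/_synapse/client/oidc"
    # ...with one, the segment is appended instead.
    assert urljoin("https://hs.example/_synapse/client/pick_idp/", "oidc") == \
        "https://hs.example/_synapse/client/pick_idp/oidc"

    # quote() leaves "/" unescaped by default, so a malicious idp_id could
    # smuggle in extra path segments; safe="" escapes it.
    assert quote("evil/../idp", safe="") == "evil%2F..%2Fidp"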
@@ -28,6 +28,7 @@ import sys
import traceback
import warnings
from textwrap import indent
from threading import Thread
from typing import (
    TYPE_CHECKING,
    Any,
@@ -40,6 +41,7 @@ from typing import (
    Tuple,
    cast,
)
from wsgiref.simple_server import WSGIServer

from cryptography.utils import CryptographyDeprecationWarning
from typing_extensions import ParamSpec
@@ -62,7 +64,6 @@ from twisted.web.resource import Resource
import synapse.util.caches
from synapse.api.constants import MAX_PDU_SIZE
from synapse.app import check_bind_error
from synapse.app.phone_stats_home import start_phone_stats_home
from synapse.config import ConfigError
from synapse.config._base import format_config_error
from synapse.config.homeserver import HomeServerConfig
@@ -72,8 +73,7 @@ from synapse.events.auto_accept_invites import InviteAutoAccepter
from synapse.events.presence_router import load_legacy_presence_router
from synapse.handlers.auth import load_legacy_password_auth_providers
from synapse.http.site import SynapseSite
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.logging.context import LoggingContext, PreserveLoggingContext
from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -98,22 +98,47 @@ reactor = cast(ISynapseReactor, _reactor)

logger = logging.getLogger(__name__)

# list of tuples of function, args list, kwargs dict
_sighup_callbacks: List[
    Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]
] = []
_instance_id_to_sighup_callbacks_map: Dict[
    str, List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]]
] = {}
"""
Map from homeserver instance_id to a list of callbacks.

We use `instance_id` instead of `server_name` because it's possible to have multiple
workers running in the same process with the same `server_name`.
"""
P = ParamSpec("P")


def register_sighup(func: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None:
def register_sighup(
    homeserver_instance_id: str,
    func: Callable[P, None],
    *args: P.args,
    **kwargs: P.kwargs,
) -> None:
    """
    Register a function to be called when a SIGHUP occurs.

    Args:
        homeserver_instance_id: The unique ID for this Synapse process instance
            (`hs.get_instance_id()`) that this hook is associated with.
        func: Function to be called when sent a SIGHUP signal.
        *args, **kwargs: args and kwargs to be passed to the target function.
    """
    _sighup_callbacks.append((func, args, kwargs))

    _instance_id_to_sighup_callbacks_map.setdefault(homeserver_instance_id, []).append(
        (func, args, kwargs)
    )


def unregister_sighups(instance_id: str) -> None:
    """
    Unregister all sighup functions associated with this Synapse instance.

    Args:
        instance_id: Unique ID for this Synapse process instance.
    """
    _instance_id_to_sighup_callbacks_map.pop(instance_id, [])
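Registration is now keyed by instance ID, so tearing down one in-process worker no longer drops the hooks of another. A hedged usage sketch (`refresh_certificate` is the real callback registered later in this diff; `hs` is a `HomeServer`):

    # Register: hooks are grouped under this homeserver's unique instance id.
    register_sighup(hs.get_instance_id(), refresh_certificate, hs)

    # On shutdown of just this instance, only its hooks are removed; other
    # workers sharing the process (and even the server_name) are untouched.
    unregister_sighups(hs.get_instance_id())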
def start_worker_reactor(
@@ -183,25 +208,23 @@ def start_reactor(
        if gc_thresholds:
            gc.set_threshold(*gc_thresholds)
        install_gc_manager()
        run_command()

    # make sure that we run the reactor with the sentinel log context,
    # otherwise other PreserveLoggingContext instances will get confused
    # and complain when they see the logcontext arbitrarily swapping
    # between the sentinel and `run` logcontexts.
    #
    # We also need to drop the logcontext before forking if we're daemonizing,
    # otherwise the cputime metrics get confused about the per-thread resource usage
    # appearing to go backwards.
    with PreserveLoggingContext():
        if daemonize:
            assert pid_file is not None
        # Reset the logging context when we start the reactor (whenever we yield control
        # to the reactor, the `sentinel` logging context needs to be set so we don't
        # leak the current logging context and erroneously apply it to the next task the
        # reactor event loop picks up)
        with PreserveLoggingContext():
            run_command()

    if print_pidfile:
        print(pid_file)
    if daemonize:
        assert pid_file is not None

    daemonize_process(pid_file, logger)
    run()
        if print_pidfile:
            print(pid_file)

        daemonize_process(pid_file, logger)

    run()


def quit_with_error(error_string: str) -> NoReturn:
@@ -243,7 +266,7 @@ def redirect_stdio_to_logs() -> None:


def register_start(
    cb: Callable[P, Awaitable], *args: P.args, **kwargs: P.kwargs
    hs: "HomeServer", cb: Callable[P, Awaitable], *args: P.args, **kwargs: P.kwargs
) -> None:
    """Register a callback with the reactor, to be called once it is running

@@ -280,10 +303,13 @@ def register_start(
        # on as normal.
        os._exit(1)

    reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper()))
    clock = hs.get_clock()
    clock.call_when_running(lambda: defer.ensureDeferred(wrapper()))
def listen_metrics(bind_addresses: StrCollection, port: int) -> None:
def listen_metrics(
    bind_addresses: StrCollection, port: int
) -> List[Tuple[WSGIServer, Thread]]:
    """
    Start Prometheus metrics server.

@@ -296,14 +322,22 @@ def listen_metrics(bind_addresses: StrCollection, port: int) -> None:
    bytecode at a time), this still works because the metrics thread can preempt the
    Twisted reactor thread between bytecode boundaries and the metrics thread gets
    scheduled with roughly equal priority to the Twisted reactor thread.

    Returns:
        List of WSGIServer with the thread they are running on.
    """
    from prometheus_client import start_http_server as start_http_server_prometheus

    from synapse.metrics import RegistryProxy

    servers: List[Tuple[WSGIServer, Thread]] = []
    for host in bind_addresses:
        logger.info("Starting metrics listener on %s:%d", host, port)
        start_http_server_prometheus(port, addr=host, registry=RegistryProxy)
        server, thread = start_http_server_prometheus(
            port, addr=host, registry=RegistryProxy
        )
        servers.append((server, thread))
    return servers


def listen_manhole(
@@ -311,7 +345,7 @@ def listen_manhole(
    port: int,
    manhole_settings: ManholeConfig,
    manhole_globals: dict,
) -> None:
) -> List[Port]:
    # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing
    # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so
    # suppress the warning for now.
@@ -323,7 +357,7 @@ def listen_manhole(

    from synapse.util.manhole import manhole

    listen_tcp(
    return listen_tcp(
        bind_addresses,
        port,
        manhole(settings=manhole_settings, globals=manhole_globals),
@@ -386,17 +420,26 @@ def listen_http(
    context_factory: Optional[IOpenSSLContextFactory],
    reactor: ISynapseReactor = reactor,
) -> List[Port]:
    """
    Args:
        listener_config: TODO
        root_resource: TODO
        version_string: A string to present for the Server header
        max_request_body_size: TODO
        context_factory: TODO
        reactor: TODO
    """
    assert listener_config.http_options is not None

    site_tag = listener_config.get_site_tag()

    site = SynapseSite(
        "synapse.access.%s.%s"
        logger_name="synapse.access.%s.%s"
        % ("https" if listener_config.is_tls() else "http", site_tag),
        site_tag,
        listener_config,
        root_resource,
        version_string,
        site_tag=site_tag,
        config=listener_config,
        resource=root_resource,
        server_version_string=version_string,
        max_request_body_size=max_request_body_size,
        reactor=reactor,
        hs=hs,
@@ -500,7 +543,7 @@ def refresh_certificate(hs: "HomeServer") -> None:
    logger.info("Context factories updated.")


async def start(hs: "HomeServer") -> None:
async def start(hs: "HomeServer", freeze: bool = True) -> None:
    """
    Start a Synapse server or worker.

@@ -511,6 +554,11 @@ async def start(hs: "HomeServer") -> None:

    Args:
        hs: homeserver instance
        freeze: whether to freeze the homeserver base objects in the garbage collector.
            May improve garbage collection performance by marking objects with an effectively
            static lifetime as frozen so they don't need to be considered for cleanup.
            If you ever want to `shutdown` the homeserver, this needs to be
            False otherwise the homeserver cannot be garbage collected after `shutdown`.
    """
    server_name = hs.hostname
    reactor = hs.get_reactor()
@@ -519,7 +567,9 @@ async def start(hs: "HomeServer") -> None:
    # numbers of DNS requests don't starve out other users of the threadpool.
    resolver_threadpool = ThreadPool(name="gai_resolver")
    resolver_threadpool.start()
    reactor.addSystemEventTrigger("during", "shutdown", resolver_threadpool.stop)
    hs.get_clock().add_system_event_trigger(
        "during", "shutdown", resolver_threadpool.stop
    )
    reactor.installNameResolver(
        GAIResolver(reactor, getThreadPool=lambda: resolver_threadpool)
    )
@@ -541,12 +591,17 @@ async def start(hs: "HomeServer") -> None:
            # we're not using systemd.
            sdnotify(b"RELOADING=1")

            for i, args, kwargs in _sighup_callbacks:
                i(*args, **kwargs)
            for sighup_callbacks in _instance_id_to_sighup_callbacks_map.values():
                for func, args, kwargs in sighup_callbacks:
                    func(*args, **kwargs)

            sdnotify(b"READY=1")

        return run_as_background_process(
        # It's okay to ignore the linter error here and call
        # `run_as_background_process` directly because `_handle_sighup` operates
        # outside of the scope of a specific `HomeServer` instance and holds no
        # references to it which would prevent a clean shutdown.
        return run_as_background_process(  # type: ignore[untracked-background-process]
            "sighup",
            server_name,
            _handle_sighup,
@@ -564,8 +619,8 @@ async def start(hs: "HomeServer") -> None:

        signal.signal(signal.SIGHUP, run_sighup)

        register_sighup(refresh_certificate, hs)
        register_sighup(reload_cache_config, hs.config)
        register_sighup(hs.get_instance_id(), refresh_certificate, hs)
        register_sighup(hs.get_instance_id(), reload_cache_config, hs.config)

    # Apply the cache config.
    hs.config.caches.resize_all_caches()
@@ -573,9 +628,6 @@ async def start(hs: "HomeServer") -> None:
    # Load the certificate from disk.
    refresh_certificate(hs)

    # Start the tracer
    init_tracer(hs)  # noqa

    # Instantiate the modules so they can register their web resources to the module API
    # before we start the listeners.
    module_api = hs.get_module_api()
@@ -601,32 +653,53 @@ async def start(hs: "HomeServer") -> None:
    hs.get_datastores().main.db_pool.start_profiling()
    hs.get_pusherpool().start()

    def log_shutdown() -> None:
        with LoggingContext(name="log_shutdown", server_name=server_name):
            logger.info("Shutting down...")

    # Log when we start the shut down process.
    hs.get_reactor().addSystemEventTrigger(
        "before", "shutdown", logger.info, "Shutting down..."
    hs.register_sync_shutdown_handler(
        phase="before",
        eventType="shutdown",
        shutdown_func=log_shutdown,
    )

    setup_sentry(hs)
    setup_sdnotify(hs)

    # If background tasks are running on the main process or this is the worker in
    # charge of them, start collecting the phone home stats and shared usage metrics.
    if hs.config.worker.run_background_tasks:
        await hs.get_common_usage_metrics_manager().setup()
        start_phone_stats_home(hs)

    # We now freeze all allocated objects in the hopes that (almost)
    # everything currently allocated are things that will be used for the
    # rest of time. Doing so means less work each GC (hopefully).
    # Register background tasks required by this server. This must be done
    # somewhat manually due to the background tasks not being registered
    # unless handlers are instantiated.
    #
    # PyPy does not (yet?) implement gc.freeze()
    if hasattr(gc, "freeze"):
        gc.collect()
        gc.freeze()
    # While we could "start" these before the reactor runs, nothing will happen until
    # the reactor is running, so we may as well do it here in `start`.
    #
    # Additionally, this means we also start them after we daemonize and fork the
    # process which means we can avoid any potential problems with cputime metrics
    # getting confused about the per-thread resource usage appearing to go backwards
    # because we're comparing the resource usage (`rusage`) from the original process to
    # the forked process.
    if hs.config.worker.run_background_tasks:
        hs.start_background_tasks()

    # Speed up shutdowns by freezing all allocated objects. This moves everything
    # into the permanent generation and excludes them from the final GC.
    atexit.register(gc.freeze)
    if freeze:
        # We now freeze all allocated objects in the hopes that (almost)
        # everything currently allocated are things that will be used for the
        # rest of time. Doing so means less work each GC (hopefully).
        #
        # Note that freezing the homeserver object means that it won't be able to be
        # garbage collected in the case of attempting an in-memory `shutdown`. This only
        # needs to be considered if such a case is desirable. Exiting the entire Python
        # process will function expectedly either way.
        #
        # PyPy does not (yet?) implement gc.freeze()
        if hasattr(gc, "freeze"):
            gc.collect()
            gc.freeze()

        # Speed up process exit by freezing all allocated objects. This moves everything
        # into the permanent generation and excludes them from the final GC.
        atexit.register(gc.freeze)
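`gc.freeze` (CPython 3.7+) moves every currently tracked object into a permanent generation that collections skip, which is why a frozen homeserver can no longer be reclaimed by an in-memory `shutdown`. A minimal illustration:

    import gc

    gc.collect()  # collect garbage first so we don't freeze it
    before = gc.get_freeze_count()
    gc.freeze()   # move all surviving objects to the permanent generation
    after = gc.get_freeze_count()
    print(f"froze {after - before} objects; future GC passes will skip them")
    # gc.unfreeze() would move them back if needed.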
def reload_cache_config(config: HomeServerConfig) -> None:
@@ -701,7 +774,7 @@ def setup_sdnotify(hs: "HomeServer") -> None:
    # we're not using systemd.
    sdnotify(b"READY=1\nMAINPID=%i" % (os.getpid(),))

    hs.get_reactor().addSystemEventTrigger(
    hs.get_clock().add_system_event_trigger(
        "before", "shutdown", sdnotify, b"STOPPING=1"
    )
@@ -24,7 +24,7 @@ import logging
import os
import sys
import tempfile
from typing import List, Mapping, Optional, Sequence
from typing import List, Mapping, Optional, Sequence, Tuple

from twisted.internet import defer, task

@@ -65,7 +65,6 @@ from synapse.storage.databases.main.stream import StreamWorkerStore
from synapse.storage.databases.main.tags import TagsWorkerStore
from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore
from synapse.types import JsonMapping, StateMap
from synapse.util import SYNAPSE_VERSION
from synapse.util.logcontext import LoggingContext

logger = logging.getLogger("synapse.app.admin_cmd")
@@ -256,7 +255,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
        return self.base_directory


def start(config_options: List[str]) -> None:
def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]:
    parser = argparse.ArgumentParser(description="Synapse Admin Command")
    HomeServerConfig.add_arguments_to_parser(parser)

@@ -282,11 +281,15 @@ def start(config_options: List[str]) -> None:
    export_data_parser.set_defaults(func=export_data_command)

    try:
        config, args = HomeServerConfig.load_config_with_parser(parser, config_options)
        config, args = HomeServerConfig.load_config_with_parser(parser, argv_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    return config, args


def start(config: HomeServerConfig, args: argparse.Namespace) -> None:
    if config.worker.worker_app is not None:
        assert config.worker.worker_app == "synapse.app.admin_cmd"

@@ -312,7 +315,6 @@ def start(config_options: List[str]) -> None:
    ss = AdminCmdServer(
        config.server.server_name,
        config=config,
        version_string=f"Synapse/{SYNAPSE_VERSION}",
    )

    setup_logging(ss, config, use_worker_options=True)
@@ -325,7 +327,7 @@ def start(config_options: List[str]) -> None:
    # command.

    async def run() -> None:
        with LoggingContext("command"):
        with LoggingContext(name="command", server_name=config.server.server_name):
            await _base.start(ss)
            await args.func(ss, args)

@@ -337,5 +339,6 @@ def start(config_options: List[str]) -> None:


if __name__ == "__main__":
    with LoggingContext("main"):
        start(sys.argv[1:])
    homeserver_config, args = load_config(sys.argv[1:])
    with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
        start(homeserver_config, args)
||||
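The net effect of this hunk is a two-phase entrypoint: parse the config first, then run the command inside a LoggingContext that already knows the server name. A hedged sketch of driving the admin command programmatically under that split (the argument list is illustrative):

# Illustrative only: load_config/start as defined in the hunk above.
config, args = load_config(["-c", "homeserver.yaml", "export-data", "@user:example.com"])
with LoggingContext(name="script", server_name=config.server.server_name):
    start(config, args)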
@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -20,13 +20,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -49,6 +49,7 @@ from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource, admin

@@ -111,7 +112,6 @@ from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore
from synapse.util import SYNAPSE_VERSION
from synapse.util.httpresourcetree import create_resource_tree

logger = logging.getLogger("synapse.app.generic_worker")

@@ -277,11 +277,13 @@ class GenericWorkerServer(HomeServer):
self._listen_http(listener)
elif listener.type == "manhole":
if isinstance(listener, TCPListenerConfig):
_base.listen_manhole(
listener.bind_addresses,
listener.port,
manhole_settings=self.config.server.manhole_settings,
manhole_globals={"hs": self},
self._listening_services.extend(
_base.listen_manhole(
listener.bind_addresses,
listener.port,
manhole_settings=self.config.server.manhole_settings,
manhole_globals={"hs": self},
)
)
else:
raise ConfigError(

@@ -295,9 +297,11 @@ class GenericWorkerServer(HomeServer):
)
else:
if isinstance(listener, TCPListenerConfig):
_base.listen_metrics(
listener.bind_addresses,
listener.port,
self._metrics_listeners.extend(
_base.listen_metrics(
listener.bind_addresses,
listener.port,
)
)
else:
raise ConfigError(

@@ -310,13 +314,26 @@ class GenericWorkerServer(HomeServer):
self.get_replication_command_handler().start_replication(self)

def start(config_options: List[str]) -> None:
def load_config(argv_options: List[str]) -> HomeServerConfig:
"""
Parse the commandline and config files (does not generate config)

Args:
argv_options: The options passed to Synapse. Usually `sys.argv[1:]`.

Returns:
Config object.
"""
try:
config = HomeServerConfig.load_config("Synapse worker", config_options)
config = HomeServerConfig.load_config("Synapse worker", argv_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

return config

def start(config: HomeServerConfig) -> None:
# For backwards compatibility allow any of the old app names.
assert config.worker.worker_app in (
"synapse.app.appservice",

@@ -341,11 +358,13 @@ def start(config_options: List[str]) -> None:
hs = GenericWorkerServer(
config.server.server_name,
config=config,
version_string=f"Synapse/{SYNAPSE_VERSION}",
)

setup_logging(hs, config, use_worker_options=True)

# Start the tracer
init_tracer(hs)  # noqa

try:
hs.setup()

@@ -355,7 +374,10 @@ def start(config_options: List[str]) -> None:
except Exception as e:
handle_startup_exception(e)

register_start(_base.start, hs)
async def start() -> None:
await _base.start(hs)

register_start(hs, start)

# redirect stdio to the logs, if configured.
if not hs.config.logging.no_redirect_stdio:

@@ -365,8 +387,9 @@ def start(config_options: List[str]) -> None:

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":
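Note that register_start now receives the homeserver alongside the async callback. Its body is not shown in this diff; a plausible sketch of what such a helper does (an assumption, not the actual implementation):

from twisted.internet import defer

def register_start(hs, coro_fn, *args):
    # Hypothetical: once the reactor is running, kick off the async startup
    # callback and hand the resulting coroutine to Twisted as a Deferred.
    def _start() -> None:
        defer.ensureDeferred(coro_fn(*args))

    hs.get_reactor().callWhenRunning(_start)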
@@ -22,14 +22,13 @@
import logging
import os
import sys
from typing import Dict, Iterable, List
from typing import Dict, Iterable, List, Optional

from twisted.internet.tcp import Port
from twisted.web.resource import EncodingResourceWrapper, Resource
from twisted.web.server import GzipEncoderFactory

import synapse
import synapse.config.logger
from synapse import events
from synapse.api.urls import (
CLIENT_API_PREFIX,

@@ -50,6 +49,7 @@ from synapse.app._base import (
)
from synapse.config._base import ConfigError, format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource

@@ -60,6 +60,7 @@ from synapse.http.server import (
StaticResource,
)
from synapse.logging.context import LoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource, admin

@@ -69,7 +70,8 @@ from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.util.check_dependencies import VERSION, check_requirements
from synapse.types import ISynapseReactor
from synapse.util.check_dependencies import check_requirements
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.module_loader import load_module

@@ -81,6 +83,10 @@ def gz_wrap(r: Resource) -> Resource:

class SynapseHomeServer(HomeServer):
"""
Homeserver class for the main Synapse process.
"""

DATASTORE_CLASS = DataStore

def _listener_http(

@@ -276,11 +282,13 @@ class SynapseHomeServer(HomeServer):
)
elif listener.type == "manhole":
if isinstance(listener, TCPListenerConfig):
_base.listen_manhole(
listener.bind_addresses,
listener.port,
manhole_settings=self.config.server.manhole_settings,
manhole_globals={"hs": self},
self._listening_services.extend(
_base.listen_manhole(
listener.bind_addresses,
listener.port,
manhole_settings=self.config.server.manhole_settings,
manhole_globals={"hs": self},
)
)
else:
raise ConfigError(

@@ -293,9 +301,11 @@ class SynapseHomeServer(HomeServer):
)
else:
if isinstance(listener, TCPListenerConfig):
_base.listen_metrics(
listener.bind_addresses,
listener.port,
self._metrics_listeners.extend(
_base.listen_metrics(
listener.bind_addresses,
listener.port,
)
)
else:
raise ConfigError(

@@ -308,17 +318,21 @@ class SynapseHomeServer(HomeServer):
logger.warning("Unrecognized listener type: %s", listener.type)

def setup(config_options: List[str]) -> SynapseHomeServer:
def load_or_generate_config(argv_options: List[str]) -> HomeServerConfig:
"""
Parse the commandline and config files

Supports generation of config files, so is used for the main homeserver app.

Args:
config_options_options: The options passed to Synapse. Usually `sys.argv[1:]`.
argv_options: The options passed to Synapse. Usually `sys.argv[1:]`.

Returns:
A homeserver instance.
"""
try:
config = HomeServerConfig.load_or_generate_config(
"Synapse Homeserver", config_options
"Synapse Homeserver", argv_options
)
except ConfigError as e:
sys.stderr.write("\n")

@@ -332,12 +346,30 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
# generating config files and shouldn't try to continue.
sys.exit(0)

return config

def create_homeserver(
config: HomeServerConfig,
reactor: Optional[ISynapseReactor] = None,
) -> SynapseHomeServer:
"""
Create a homeserver instance for the Synapse main process.

Args:
config: The configuration for the homeserver.
reactor: Optionally provide a reactor to use. Can be useful in scenarios
where you want control over the reactor, such as tests.

Returns:
A homeserver instance.
"""

if config.worker.worker_app:
raise ConfigError(
"You have specified `worker_app` in the config but are attempting to start a non-worker "
"You have specified `worker_app` in the config but are attempting to setup a non-worker "
"instance. Please use `python -m synapse.app.generic_worker` instead (or remove the option if this is the main process)."
)
sys.exit(1)

events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

@@ -345,76 +377,100 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
if config.server.gc_seconds:
synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds

if (
config.registration.enable_registration
and not config.registration.enable_registration_without_verification
):
if (
not config.captcha.enable_registration_captcha
and not config.registration.registrations_require_3pid
and not config.registration.registration_requires_token
):
raise ConfigError(
"You have enabled open registration without any verification. This is a known vector for "
"spam and abuse. If you would like to allow public registration, please consider adding email, "
"captcha, or token-based verification. Otherwise this check can be removed by setting the "
"`enable_registration_without_verification` config option to `true`."
)

hs = SynapseHomeServer(
config.server.server_name,
hostname=config.server.server_name,
config=config,
version_string=f"Synapse/{VERSION}",
reactor=reactor,
)

synapse.config.logger.setup_logging(hs, config, use_worker_options=False)
return hs

def setup(
hs: SynapseHomeServer,
*,
freeze: bool = True,
) -> None:
"""
Set up a Synapse homeserver instance given a configuration.

Args:
hs: The homeserver to setup.
freeze: whether to freeze the homeserver base objects in the garbage collector.
May improve garbage collection performance by marking objects with an effectively
static lifetime as frozen so they don't need to be considered for cleanup.
If you ever want to `shutdown` the homeserver, this needs to be
False, otherwise the homeserver cannot be garbage collected after `shutdown`.

Returns:
A homeserver instance.
"""

setup_logging(hs, hs.config, use_worker_options=False)

# Log after we've configured logging.
logger.info("Setting up server")

# Start the tracer
init_tracer(hs)  # noqa

try:
hs.setup()
except Exception as e:
handle_startup_exception(e)

async def start() -> None:
async def _start_when_reactor_running() -> None:
# TODO: Feels like this should be moved somewhere else.
#
# Load the OIDC provider metadatas, if OIDC is enabled.
if hs.config.oidc.oidc_enabled:
oidc = hs.get_oidc_handler()
# Loading the provider metadata also ensures the provider config is valid.
await oidc.load_metadata()

await _base.start(hs)
await _base.start(hs, freeze)

# TODO: Feels like this should be moved somewhere else.
hs.get_datastores().main.db_pool.updates.start_doing_background_updates()

register_start(start)

return hs
# Register a callback to be invoked once the reactor is running
register_start(hs, _start_when_reactor_running)

def run(hs: HomeServer) -> None:
def start_reactor(
config: HomeServerConfig,
) -> None:
"""
Start the reactor (Twisted event-loop).

Args:
config: The configuration for the homeserver.
"""
_base.start_reactor(
"synapse-homeserver",
soft_file_limit=hs.config.server.soft_file_limit,
gc_thresholds=hs.config.server.gc_thresholds,
pid_file=hs.config.server.pid_file,
daemonize=hs.config.server.daemonize,
print_pidfile=hs.config.server.print_pidfile,
soft_file_limit=config.server.soft_file_limit,
gc_thresholds=config.server.gc_thresholds,
pid_file=config.server.pid_file,
daemonize=config.server.daemonize,
print_pidfile=config.server.print_pidfile,
logger=logger,
)

def main() -> None:
with LoggingContext("main"):
homeserver_config = load_or_generate_config(sys.argv[1:])

with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
# check base requirements
check_requirements()
hs = setup(sys.argv[1:])
hs = create_homeserver(homeserver_config)
setup(hs)

# redirect stdio to the logs, if configured.
if not hs.config.logging.no_redirect_stdio:
redirect_stdio_to_logs()

run(hs)
start_reactor(homeserver_config)

if __name__ == "__main__":
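With setup() split into load_or_generate_config / create_homeserver / setup / start_reactor, the main process can also be driven programmatically. A hedged sketch, assuming the functions behave as their docstrings above describe:

def run_embedded(config_path: str) -> None:
    # Illustrative composition of the four entrypoints from this file.
    config = load_or_generate_config(["-c", config_path])
    hs = create_homeserver(config)
    # freeze=False keeps the homeserver collectable if it is ever shut
    # down in-process (see the gc.freeze discussion earlier).
    setup(hs, freeze=False)
    start_reactor(config)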
@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -29,19 +29,18 @@ from prometheus_client import Gauge
from twisted.internet import defer

from synapse.metrics import SERVER_NAME_LABEL
from synapse.metrics.background_process_metrics import (
run_as_background_process,
)
from synapse.types import JsonDict
from synapse.util.constants import ONE_HOUR_SECONDS, ONE_MINUTE_SECONDS
from synapse.util.constants import (
MILLISECONDS_PER_SECOND,
ONE_HOUR_SECONDS,
ONE_MINUTE_SECONDS,
)

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger("synapse.app.homeserver")

MILLISECONDS_PER_SECOND = 1000

INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS
"""
We wait 5 minutes to send the first set of stats as the server can be quite busy the

@@ -85,8 +84,6 @@ def phone_stats_home(
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> "defer.Deferred[None]":
server_name = hs.hostname

async def _phone_stats_home(
hs: "HomeServer",
stats: JsonDict,

@@ -200,8 +197,8 @@ def phone_stats_home(
except Exception as e:
logger.warning("Error reporting stats: %s", e)

return run_as_background_process(
"phone_stats_home", server_name, _phone_stats_home, hs, stats, stats_process
return hs.run_as_background_process(
"phone_stats_home", _phone_stats_home, hs, stats, stats_process
)
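This hunk (and several later ones) replaces the free run_as_background_process function with a method on HomeServer, so call sites stop threading server_name through by hand. The method's implementation is not part of this diff; inferred from the call sites, it plausibly looks like:

class HomeServer:
    ...

    def run_as_background_process(self, desc, func, *args, **kwargs):
        # Hypothetical forwarding wrapper: supply our own hostname so the
        # background-process metrics are labelled per homeserver.
        return run_as_background_process(desc, self.hostname, func, *args, **kwargs)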
@@ -263,9 +260,8 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
float(hs.config.server.max_mau_value)
)

return run_as_background_process(
return hs.run_as_background_process(
"generate_monthly_active_users",
server_name,
_generate_monthly_active_users,
)

@@ -285,10 +281,16 @@ def start_phone_stats_home(hs: "HomeServer") -> None:

# We need to defer this init for the cases that we daemonize
# otherwise the process ID we get is that of the non-daemon process
clock.call_later(0, performance_stats_init)
clock.call_later(
0,
performance_stats_init,
)

# We wait 5 minutes to send the first set of stats as the server can
# be quite busy the first few minutes
clock.call_later(
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS, phone_stats_home, hs, stats
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS,
phone_stats_home,
hs,
stats,
)

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -21,13 +21,14 @@

import sys

from synapse.app.generic_worker import start
from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext

def main() -> None:
with LoggingContext("main"):
start(sys.argv[1:])
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)

if __name__ == "__main__":

@@ -23,15 +23,33 @@
import logging
import re
from enum import Enum
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Pattern, Sequence
from typing import (
TYPE_CHECKING,
Dict,
Iterable,
List,
Optional,
Pattern,
Sequence,
cast,
)

import attr
from netaddr import IPSet

from twisted.internet import reactor

from synapse.api.constants import EventTypes
from synapse.events import EventBase
from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, UserID
from synapse.types import (
DeviceListUpdates,
ISynapseThreadlessReactor,
JsonDict,
JsonMapping,
UserID,
)
from synapse.util.caches.descriptors import _CacheContext, cached
from synapse.util.clock import Clock

if TYPE_CHECKING:
from synapse.appservice.api import ApplicationServiceApi

@@ -98,6 +116,15 @@ class ApplicationService:
self.sender = sender
# The application service user should be part of the server's domain.
self.server_name = sender.domain  # nb must be called this for @cached

# Ideally we would require passing in the `HomeServer` `Clock` instance.
# However this is not currently possible as there are places which use
# `@cached` that aren't aware of the `HomeServer` instance.
# nb must be called this for @cached
self.clock = Clock(
cast(ISynapseThreadlessReactor, reactor), server_name=self.server_name
)  # type: ignore[multiple-internal-clocks]

self.namespaces = self._check_namespaces(namespaces)
self.id = id
self.ip_range_whitelist = ip_range_whitelist
@@ -81,10 +81,9 @@ from synapse.appservice import (
from synapse.appservice.api import ApplicationServiceApi
from synapse.events import EventBase
from synapse.logging.context import run_in_background
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.databases.main import DataStore
from synapse.types import DeviceListUpdates, JsonMapping
from synapse.util import Clock
from synapse.util.clock import Clock

if TYPE_CHECKING:
from synapse.server import HomeServer

@@ -200,6 +199,7 @@ class _ServiceQueuer:
)
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.hs = hs
self._store = hs.get_datastores().main

def start_background_request(self, service: ApplicationService) -> None:

@@ -207,9 +207,7 @@ class _ServiceQueuer:
if service.id in self.requests_in_flight:
return

run_as_background_process(
"as-sender", self.server_name, self._send_request, service
)
self.hs.run_as_background_process("as-sender", self._send_request, service)

async def _send_request(self, service: ApplicationService) -> None:
# sanity-check: we shouldn't get here if this service already has a sender

@@ -361,6 +359,7 @@ class _TransactionController:
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.hs = hs
self.store = hs.get_datastores().main
self.as_api = hs.get_application_service_api()

@@ -448,6 +447,7 @@ class _TransactionController:
recoverer = self.RECOVERER_CLASS(
self.server_name,
self.clock,
self.hs,
self.store,
self.as_api,
service,

@@ -494,6 +494,7 @@ class _Recoverer:
self,
server_name: str,
clock: Clock,
hs: "HomeServer",
store: DataStore,
as_api: ApplicationServiceApi,
service: ApplicationService,

@@ -501,6 +502,7 @@ class _Recoverer:
):
self.server_name = server_name
self.clock = clock
self.hs = hs
self.store = store
self.as_api = as_api
self.service = service

@@ -513,9 +515,8 @@ class _Recoverer:
logger.info("Scheduling retries on %s in %fs", self.service.id, delay)
self.scheduled_recovery = self.clock.call_later(
delay,
run_as_background_process,
self.hs.run_as_background_process,
"as-recoverer",
self.server_name,
self.retry,
)

@@ -535,9 +536,8 @@ class _Recoverer:
if self.scheduled_recovery:
self.clock.cancel_call_later(self.scheduled_recovery)
# Run a retry, which will reschedule a recovery if it fails.
run_as_background_process(
self.hs.run_as_background_process(
"retry",
self.server_name,
self.retry,
)
@@ -22,6 +22,7 @@

import argparse
import errno
import importlib.resources as importlib_resources
import logging
import os
import re

@@ -46,7 +47,6 @@ from typing import (

import attr
import jinja2
import pkg_resources
import yaml

from synapse.types import StrSequence

@@ -174,8 +174,8 @@ class Config:
self.root = root_config

# Get the path to the default Synapse template directory
self.default_template_dir = pkg_resources.resource_filename(
"synapse", "res/templates"
self.default_template_dir = str(
importlib_resources.files("synapse").joinpath("res").joinpath("templates")
)

@staticmethod

@@ -545,18 +545,22 @@ class RootConfig:

@classmethod
def load_config(
cls: Type[TRootConfig], description: str, argv: List[str]
cls: Type[TRootConfig], description: str, argv_options: List[str]
) -> TRootConfig:
"""Parse the commandline and config files

Doesn't support config-file-generation: used by the worker apps.

Args:
description: TODO
argv_options: The options passed to Synapse. Usually `sys.argv[1:]`.

Returns:
Config object.
"""
config_parser = argparse.ArgumentParser(description=description)
cls.add_arguments_to_parser(config_parser)
obj, _ = cls.load_config_with_parser(config_parser, argv)
obj, _ = cls.load_config_with_parser(config_parser, argv_options)

return obj

@@ -601,7 +605,7 @@ class RootConfig:

@classmethod
def load_config_with_parser(
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv: List[str]
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str]
) -> Tuple[TRootConfig, argparse.Namespace]:
"""Parse the commandline and config files with the given parser

@@ -609,16 +613,20 @@ class RootConfig:

Used for workers where we want to add extra flags/subcommands.

Note: This is the common denominator for loading config and is also used by
`load_config` and `load_or_generate_config`, which is why we call
`validate_config()` here.

Args:
parser
argv
argv_options: The options passed to Synapse. Usually `sys.argv[1:]`.

Returns:
Returns the parsed config object and the parsed argparse.Namespace
object from `parser.parse_args(..)`
"""

config_args = parser.parse_args(argv)
config_args = parser.parse_args(argv_options)

config_files = find_config_files(search_paths=config_args.config_path)
obj = cls(config_files)

@@ -642,16 +650,24 @@ class RootConfig:

obj.invoke_all("read_arguments", config_args)

# Now that we finally have the full config sections parsed, allow subclasses to
# do some extra validation across the entire config.
obj.validate_config()

return obj, config_args

@classmethod
def load_or_generate_config(
cls: Type[TRootConfig], description: str, argv: List[str]
cls: Type[TRootConfig], description: str, argv_options: List[str]
) -> Optional[TRootConfig]:
"""Parse the commandline and config files

Supports generation of config files, so is used for the main homeserver app.

Args:
description: TODO
argv_options: The options passed to Synapse. Usually `sys.argv[1:]`.

Returns:
Config object, or None if --generate-config or --generate-keys was set
"""

@@ -747,7 +763,7 @@ class RootConfig:
)

cls.invoke_all_static("add_arguments", parser)
config_args = parser.parse_args(argv)
config_args = parser.parse_args(argv_options)

config_files = find_config_files(search_paths=config_args.config_path)

@@ -838,15 +854,7 @@ class RootConfig:
):
return None

obj.parse_config_dict(
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=config_args.secrets_in_config,
)
obj.invoke_all("read_arguments", config_args)

return obj
return cls.load_config(description, argv_options)

def parse_config_dict(
self,

@@ -907,6 +915,20 @@ class RootConfig:
existing_config.root = None
return existing_config

def validate_config(self) -> None:
"""
Additional config validation across all config sections.

Override this in subclasses to add extra validation. This is called once all
config option values have been populated.

XXX: This should only validate, not modify the configuration, as the final
config state is required for proper validation across all config sections.

Raises:
ConfigError: if the config is invalid.
"""

def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
"""Read the config files and shallowly merge them into a dict.

@@ -37,6 +37,7 @@ from synapse.config import (  # noqa: F401
key,
logger,
mas,
matrixrtc,
metrics,
modules,
oembed,

@@ -126,6 +127,7 @@ class RootConfig:
auto_accept_invites: auto_accept_invites.AutoAcceptInvitesConfig
user_types: user_types.UserTypesConfig
mas: mas.MasConfig
matrix_rtc: matrixrtc.MatrixRtcConfig

config_classes: List[Type["Config"]] = ...
config_files: List[str]

@@ -156,11 +158,11 @@ class RootConfig:
) -> str: ...
@classmethod
def load_or_generate_config(
cls: Type[TRootConfig], description: str, argv: List[str]
cls: Type[TRootConfig], description: str, argv_options: List[str]
) -> Optional[TRootConfig]: ...
@classmethod
def load_config(
cls: Type[TRootConfig], description: str, argv: List[str]
cls: Type[TRootConfig], description: str, argv_options: List[str]
) -> TRootConfig: ...
@classmethod
def add_arguments_to_parser(

@@ -168,7 +170,7 @@ class RootConfig:
) -> None: ...
@classmethod
def load_config_with_parser(
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv: List[str]
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str]
) -> Tuple[TRootConfig, argparse.Namespace]: ...
def generate_missing_files(
self, config_dict: dict, config_dir_path: str

@@ -412,11 +412,6 @@ class ExperimentalConfig(Config):
"msc2409_to_device_messages_enabled", False
)

# The portion of MSC3202 which is related to device masquerading.
self.msc3202_device_masquerading_enabled: bool = experimental.get(
"msc3202_device_masquerading", False
)

# The portion of MSC3202 related to transaction extensions:
# sending device list changes, one-time key counts and fallback key
# usage to application services.

@@ -556,6 +551,12 @@ class ExperimentalConfig(Config):
# MSC4133: Custom profile fields
self.msc4133_enabled: bool = experimental.get("msc4133_enabled", False)

# MSC4143: Matrix RTC Transport using Livekit Backend
self.msc4143_enabled: bool = experimental.get("msc4143_enabled", False)

# MSC4169: Backwards-compatible redaction sending using `/send`
self.msc4169_enabled: bool = experimental.get("msc4169_enabled", False)

# MSC4210: Remove legacy mentions
self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False)

@@ -590,5 +591,5 @@ class ExperimentalConfig(Config):
self.msc4293_enabled: bool = experimental.get("msc4293_enabled", False)

# MSC4306: Thread Subscriptions
# (and MSC4308: sliding sync extension for thread subscriptions)
# (and MSC4308: Thread Subscriptions extension to Sliding Sync)
self.msc4306_enabled: bool = experimental.get("msc4306_enabled", False)

@@ -18,7 +18,8 @@
# [This file includes modifications made by New Vector Limited]
#
#
from ._base import RootConfig

from ._base import ConfigError, RootConfig
from .account_validity import AccountValidityConfig
from .api import ApiConfig
from .appservice import AppServiceConfig

@@ -37,6 +38,7 @@ from .jwt import JWTConfig
from .key import KeyConfig
from .logger import LoggingConfig
from .mas import MasConfig
from .matrixrtc import MatrixRtcConfig
from .metrics import MetricsConfig
from .modules import ModulesConfig
from .oembed import OembedConfig

@@ -66,6 +68,10 @@ from .workers import WorkerConfig

class HomeServerConfig(RootConfig):
"""
Top-level config object for Synapse homeserver (main process and workers).
"""

config_classes = [
ModulesConfig,
ServerConfig,

@@ -80,6 +86,7 @@ class HomeServerConfig(RootConfig):
OembedConfig,
CaptchaConfig,
VoipConfig,
MatrixRtcConfig,
RegistrationConfig,
AccountValidityConfig,
MetricsConfig,

@@ -113,3 +120,22 @@ class HomeServerConfig(RootConfig):
# This must be last, as it checks for conflicts with other config options.
MasConfig,
]

def validate_config(
self,
) -> None:
if (
self.registration.enable_registration
and not self.registration.enable_registration_without_verification
):
if (
not self.captcha.enable_registration_captcha
and not self.registration.registrations_require_3pid
and not self.registration.registration_requires_token
):
raise ConfigError(
"You have enabled open registration without any verification. This is a known vector for "
"spam and abuse. If you would like to allow public registration, please consider adding email, "
"captcha, or token-based verification. Otherwise this check can be removed by setting the "
"`enable_registration_without_verification` config option to `true`."
)

@@ -40,7 +40,6 @@ from twisted.logger import (
)

from synapse.logging.context import LoggingContextFilter
from synapse.logging.filter import MetadataFilter
from synapse.synapse_rust import reset_logging_config
from synapse.types import JsonDict

@@ -213,13 +212,11 @@ def _setup_stdlib_logging(
# writes.

log_context_filter = LoggingContextFilter()
log_metadata_filter = MetadataFilter({"server_name": config.server.server_name})
old_factory = logging.getLogRecordFactory()

def factory(*args: Any, **kwargs: Any) -> logging.LogRecord:
record = old_factory(*args, **kwargs)
log_context_filter.filter(record)
log_metadata_filter.filter(record)
return record

logging.setLogRecordFactory(factory)

@@ -348,7 +345,9 @@ def setup_logging(
# Add a SIGHUP handler to reload the logging configuration, if one is available.
from synapse.app import _base as appbase

appbase.register_sighup(_reload_logging_config, log_config_path)
appbase.register_sighup(
hs.get_instance_id(), _reload_logging_config, log_config_path
)

# Log immediately so we can grep backwards.
logger.warning("***** STARTING SERVER *****")
synapse/config/matrixrtc.py (new file, 67 lines)
@@ -0,0 +1,67 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2025 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# [This file includes modifications made by New Vector Limited]
#
#

from typing import Any, Optional

from pydantic import ValidationError

from synapse._pydantic_compat import Field, StrictStr, validator
from synapse.types import JsonDict
from synapse.util.pydantic_models import ParseModel

from ._base import Config, ConfigError

class TransportConfigModel(ParseModel):
type: StrictStr

livekit_service_url: Optional[StrictStr] = Field(default=None)
"""An optional livekit service URL. Only required if type is "livekit"."""

@validator("livekit_service_url", always=True)
def validate_livekit_service_url(cls, v: Any, values: dict) -> Any:
if values.get("type") == "livekit" and not v:
raise ValueError(
"You must set a `livekit_service_url` when using the 'livekit' transport."
)

return v

class MatrixRtcConfigModel(ParseModel):
transports: list = []

class MatrixRtcConfig(Config):
section = "matrix_rtc"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
matrix_rtc = config.get("matrix_rtc", {})
if matrix_rtc is None:
matrix_rtc = {}

try:
parsed = MatrixRtcConfigModel(**matrix_rtc)
except ValidationError as e:
raise ConfigError(
"Could not validate matrix_rtc config",
("matrix_rtc",),
) from e

self.transports = parsed.transports
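Based on the models above, the input that read_config validates would plausibly look like this (values illustrative; validation failures surface as a ConfigError for the ("matrix_rtc",) path):

# Equivalent to a homeserver.yaml stanza such as:
#   matrix_rtc:
#     transports:
#       - type: livekit
#         livekit_service_url: "https://livekit.example.com"
example = {
    "transports": [
        {"type": "livekit", "livekit_service_url": "https://livekit.example.com"}
    ]
}
parsed = MatrixRtcConfigModel(**example)  # raises ValidationError if invalid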
@@ -18,13 +18,13 @@
# [This file includes modifications made by New Vector Limited]
#
#
import importlib.resources as importlib_resources
import json
import re
from typing import Any, Dict, Iterable, List, Optional, Pattern
from urllib import parse as urlparse

import attr
import pkg_resources

from synapse.types import JsonDict, StrSequence

@@ -64,7 +64,12 @@ class OembedConfig(Config):
"""
# Whether to use the packaged providers.json file.
if not oembed_config.get("disable_default_providers") or False:
with pkg_resources.resource_stream("synapse", "res/providers.json") as s:
path = (
importlib_resources.files("synapse")
.joinpath("res")
.joinpath("providers.json")
)
with path.open("r", encoding="utf-8") as s:
providers = json.load(s)

yield from self._parse_and_validate_provider(
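The same pkg_resources-to-importlib.resources migration appears in config/_base.py earlier in this diff. The replacement pattern in isolation (the resource path is the one used above):

import importlib.resources as importlib_resources
import json

# pkg_resources.resource_stream("synapse", "res/providers.json") becomes a
# stdlib Traversable lookup, which can then be opened like a path.
path = importlib_resources.files("synapse").joinpath("res").joinpath("providers.json")
with path.open("r", encoding="utf-8") as f:
    providers = json.load(f)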
@@ -120,11 +120,19 @@ def parse_thumbnail_requirements(

@attr.s(auto_attribs=True, slots=True, frozen=True)
class MediaUploadLimit:
"""A limit on the amount of data a user can upload in a given time
period."""
"""
Represents a limit on the amount of data a user can upload in a given time
period.

These can be configured through the `media_upload_limits` [config option](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#media_upload_limits)
or via the `get_media_upload_limits_for_user` module API [callback](https://element-hq.github.io/synapse/latest/modules/media_repository_callbacks.html#get_media_upload_limits_for_user).
"""

max_bytes: int
"""The maximum number of bytes that can be uploaded in the given time period."""

time_period_ms: int
"""The time period in milliseconds."""

class ContentRepositoryConfig(Config):
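A small usage sketch of the frozen attrs class above (values illustrative):

# One gigabyte per week, expressed with the two fields defined above.
weekly_limit = MediaUploadLimit(
    max_bytes=1024 * 1024 * 1024,
    time_period_ms=7 * 24 * 60 * 60 * 1000,
)
# frozen=True makes instances immutable (and therefore safely shareable).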
@@ -172,7 +172,7 @@ class Keyring:
_FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]]
] = BatchingQueue(
name="keyring_server",
server_name=self.server_name,
hs=hs,
clock=hs.get_clock(),
# The method called to fetch each key
process_batch_callback=self._inner_fetch_key_requests,

@@ -194,6 +194,14 @@ class Keyring:
valid_until_ts=2**63,  # fake future timestamp
)

def shutdown(self) -> None:
"""
Prepares the Keyring for garbage collection by shutting down its queues.
"""
self._fetch_keys_queue.shutdown()
for key_fetcher in self._key_fetchers:
key_fetcher.shutdown()

async def verify_json_for_server(
self,
server_name: str,

@@ -316,7 +324,7 @@ class Keyring:
if key_result.valid_until_ts < verify_request.minimum_valid_until_ts:
continue

await self._process_json(key_result.verify_key, verify_request)
await self.process_json(key_result.verify_key, verify_request)
verified = True

if not verified:

@@ -326,7 +334,7 @@ class Keyring:
Codes.UNAUTHORIZED,
)

async def _process_json(
async def process_json(
self, verify_key: VerifyKey, verify_request: VerifyJsonRequest
) -> None:
"""Processes the `VerifyJsonRequest`. Raises if the signature can't be

@@ -479,11 +487,17 @@ class KeyFetcher(metaclass=abc.ABCMeta):
self.server_name = hs.hostname
self._queue = BatchingQueue(
name=self.__class__.__name__,
server_name=self.server_name,
hs=hs,
clock=hs.get_clock(),
process_batch_callback=self._fetch_keys,
)

def shutdown(self) -> None:
"""
Prepares the KeyFetcher for garbage collection by shutting down its queue.
"""
self._queue.shutdown()

async def get_keys(
self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int
) -> Dict[str, FetchKeyResult]:
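These shutdown() methods cascade from the Keyring down to its batching queues, complementing the in-memory shutdown case discussed alongside gc.freeze earlier. A hedged sketch of the intended teardown order (the accessor is Synapse's usual one, but its use here is an assumption):

# Hypothetical embedded teardown:
keyring = hs.get_keyring()
keyring.shutdown()  # stops the keyring's own BatchingQueue...
# ...and each KeyFetcher's queue, after which nothing holds pending work
# and the objects can be garbage collected (unless they were frozen).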
@@ -119,7 +119,6 @@ class InviteAutoAccepter:
event.state_key,
event.room_id,
"join",
bg_start_span=False,
)

if is_direct_message:

@@ -38,7 +38,7 @@ from synapse.storage.databases.main import DataStore
from synapse.synapse_rust.events import EventInternalMetadata
from synapse.types import EventID, JsonDict, StrCollection
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.clock import Clock
from synapse.util.stringutils import random_string

if TYPE_CHECKING:

@@ -306,6 +306,12 @@ class EventContext(UnpersistedEventContextBase):
)

EventPersistencePair = Tuple[EventBase, EventContext]
"""
The combination of an event to be persisted and its context.
"""

@attr.s(slots=True, auto_attribs=True)
class UnpersistedEventContext(UnpersistedEventContextBase):
"""

@@ -363,7 +369,7 @@ class UnpersistedEventContext(UnpersistedEventContextBase):
room_id: str,
last_known_state_group: int,
datastore: "StateGroupDataStore",
) -> List[Tuple[EventBase, EventContext]]:
) -> List[EventPersistencePair]:
"""
Takes a list of events and their associated unpersisted contexts and persists
the unpersisted contexts, returning a list of events and persisted contexts.

@@ -148,6 +148,7 @@ class FederationClient(FederationBase):
self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache(
cache_name="get_pdu_cache",
server_name=self.server_name,
hs=self.hs,
clock=self._clock,
max_len=1000,
expiry_ms=120 * 1000,

@@ -167,6 +168,7 @@ class FederationClient(FederationBase):
] = ExpiringCache(
cache_name="get_room_hierarchy_cache",
server_name=self.server_name,
hs=self.hs,
clock=self._clock,
max_len=1000,
expiry_ms=5 * 60 * 1000,

@@ -495,6 +497,43 @@ class FederationClient(FederationBase):
)
return RECOMMENDATION_OK

@trace
@tag_args
async def ask_policy_server_to_sign_event(
self, destination: str, pdu: EventBase, timeout: Optional[int] = None
) -> Optional[JsonDict]:
"""Requests that the destination server (typically a policy server)
sign the event as not spam.

If the policy server could not be contacted or the policy server
returned an error, this returns no signature.

Args:
destination: The remote homeserver to ask (a policy server)
pdu: The event to sign
timeout: How long to try (in ms) the destination for before
giving up. None indicates no timeout.
Returns:
The signature from the policy server, structured in the same way as the 'signatures'
JSON in the event e.g. { "$policy_server_via_domain" : { "ed25519:policy_server": "signature_base64" }}
"""
logger.debug(
"ask_policy_server_to_sign_event for event_id=%s from %s",
pdu.event_id,
destination,
)
try:
return await self.transport_layer.ask_policy_server_to_sign_event(
destination, pdu, timeout=timeout
)
except Exception as e:
logger.warning(
"ask_policy_server_to_sign_event: server %s responded with error: %s",
destination,
e,
)
return None

@trace
@tag_args
async def get_pdu(

@@ -59,7 +59,7 @@ from synapse.api.errors import (
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.crypto.event_signing import compute_event_signature
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.snapshot import EventPersistencePair
from synapse.federation.federation_base import (
FederationBase,
InvalidEventSignatureError,

@@ -159,7 +159,7 @@ class FederationServer(FederationBase):
# with FederationHandlerRegistry.
hs.get_directory_handler()

self._server_linearizer = Linearizer("fed_server")
self._server_linearizer = Linearizer(name="fed_server", clock=hs.get_clock())

# origins that we are currently processing a transaction from.
# a dict from origin to txn id.

@@ -914,7 +914,7 @@ class FederationServer(FederationBase):

async def _on_send_membership_event(
self, origin: str, content: JsonDict, membership_type: str, room_id: str
) -> Tuple[EventBase, EventContext]:
) -> EventPersistencePair:
"""Handle an on_send_{join,leave,knock} request

Does some preliminary validation before passing the request on to the

@@ -37,6 +37,7 @@ Events are replicated via a separate events stream.
"""

import logging
from enum import Enum
from typing import (
TYPE_CHECKING,
Dict,

@@ -67,6 +68,25 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)

class QueueNames(str, Enum):
PRESENCE_MAP = "presence_map"
KEYED_EDU = "keyed_edu"
KEYED_EDU_CHANGED = "keyed_edu_changed"
EDUS = "edus"
POS_TIME = "pos_time"
PRESENCE_DESTINATIONS = "presence_destinations"

queue_name_to_gauge_map: Dict[QueueNames, LaterGauge] = {}

for queue_name in QueueNames:
queue_name_to_gauge_map[queue_name] = LaterGauge(
name=f"synapse_federation_send_queue_{queue_name.value}_size",
desc="",
labelnames=[SERVER_NAME_LABEL],
)

class FederationRemoteSendQueue(AbstractFederationSender):
"""A drop in replacement for FederationSender"""

@@ -111,26 +131,22 @@ class FederationRemoteSendQueue(AbstractFederationSender):
# we need to make a new function so the inner lambda binds to the queue
# rather than to the name of the queue, which changes. ARGH.
def register(name: str, queue: Sized) -> None:
LaterGauge(
name="synapse_federation_send_queue_%s_size" % (queue_name,),
desc="",
labelnames=[SERVER_NAME_LABEL],
caller=lambda: {(self.server_name,): len(queue)},
def register(queue_name: QueueNames, queue: Sized) -> None:
queue_name_to_gauge_map[queue_name].register_hook(
homeserver_instance_id=hs.get_instance_id(),
hook=lambda: {(self.server_name,): len(queue)},
)

for queue_name in [
"presence_map",
"keyed_edu",
"keyed_edu_changed",
"edus",
"pos_time",
"presence_destinations",
]:
register(queue_name, getattr(self, queue_name))
for queue_name in QueueNames:
queue = getattr(self, queue_name.value)
assert isinstance(queue, Sized)
register(queue_name, queue=queue)

self.clock.looping_call(self._clear_queue, 30 * 1000)

def shutdown(self) -> None:
"""Stops this federation sender instance from sending further transactions."""

def _next_pos(self) -> int:
pos = self.pos
self.pos += 1
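The refactor above pre-declares one LaterGauge per queue at module import and attaches a per-instance hook later, instead of constructing gauges inside __init__ (which would register duplicate metrics when several homeserver instances share a process). A condensed sketch of that shape, using only the calls visible in this hunk:

from enum import Enum
from typing import Dict, Sized

class QueueNames(str, Enum):
    EDUS = "edus"
    POS_TIME = "pos_time"

# Module level: exactly one gauge per queue name for the whole process.
queue_gauges: Dict[QueueNames, LaterGauge] = {
    qn: LaterGauge(
        name=f"synapse_federation_send_queue_{qn.value}_size",
        desc="",
        labelnames=[SERVER_NAME_LABEL],
    )
    for qn in QueueNames
}

# Instance level: each homeserver reports its own queue sizes via a hook.
def register_for_instance(hs, server_name: str, qn: QueueNames, queue: Sized) -> None:
    queue_gauges[qn].register_hook(
        homeserver_instance_id=hs.get_instance_id(),
        hook=lambda: {(server_name,): len(queue)},
    )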
@@ -150,6 +150,7 @@ from prometheus_client import Counter
|
||||
from twisted.internet import defer
|
||||
|
||||
import synapse.metrics
|
||||
from synapse.api.constants import EventTypes, Membership
|
||||
from synapse.api.presence import UserPresenceState
|
||||
from synapse.events import EventBase
|
||||
from synapse.federation.sender.per_destination_queue import (
|
||||
@@ -167,7 +168,6 @@ from synapse.metrics import (
|
||||
events_processed_counter,
|
||||
)
|
||||
from synapse.metrics.background_process_metrics import (
|
||||
run_as_background_process,
|
||||
wrap_as_background_process,
|
||||
)
|
||||
from synapse.types import (
|
||||
@@ -177,7 +177,7 @@ from synapse.types import (
|
||||
StrCollection,
|
||||
get_domain_from_id,
|
||||
)
|
||||
from synapse.util import Clock
|
||||
from synapse.util.clock import Clock
|
||||
from synapse.util.metrics import Measure
|
||||
from synapse.util.retryutils import filter_destinations_by_retry_limiter
|
||||
|
||||
@@ -199,6 +199,24 @@ sent_pdus_destination_dist_total = Counter(
|
||||
labelnames=[SERVER_NAME_LABEL],
|
||||
)
|
||||
|
||||
transaction_queue_pending_destinations_gauge = LaterGauge(
|
||||
name="synapse_federation_transaction_queue_pending_destinations",
|
||||
desc="",
|
||||
labelnames=[SERVER_NAME_LABEL],
|
||||
)
|
||||
|
||||
transaction_queue_pending_pdus_gauge = LaterGauge(
|
||||
name="synapse_federation_transaction_queue_pending_pdus",
|
||||
desc="",
|
||||
labelnames=[SERVER_NAME_LABEL],
|
||||
)
|
||||
|
||||
transaction_queue_pending_edus_gauge = LaterGauge(
|
||||
name="synapse_federation_transaction_queue_pending_edus",
|
||||
desc="",
|
||||
labelnames=[SERVER_NAME_LABEL],
|
||||
)
|
||||
|
||||
# Time (in s) to wait before trying to wake up destinations that have
|
||||
# catch-up outstanding.
|
||||
# Please note that rate limiting still applies, so while the loop is
|
||||
@@ -213,6 +231,11 @@ WAKEUP_INTERVAL_BETWEEN_DESTINATIONS_SEC = 5


 class AbstractFederationSender(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def shutdown(self) -> None:
+        """Stops this federation sender instance from sending further transactions."""
+        raise NotImplementedError()
+
     @abc.abstractmethod
     def notify_new_events(self, max_token: RoomStreamToken) -> None:
         """This gets called when we have some new events we might want to
@@ -307,6 +330,7 @@ class _DestinationWakeupQueue:
     _MAX_TIME_IN_QUEUE = 30.0

     sender: "FederationSender" = attr.ib()
+    hs: "HomeServer" = attr.ib()
     server_name: str = attr.ib()
     """
     Our homeserver name (used to label metrics) (`hs.hostname`).
@@ -398,11 +422,9 @@ class FederationSender(AbstractFederationSender):
         # map from destination to PerDestinationQueue
         self._per_destination_queues: Dict[str, PerDestinationQueue] = {}

-        LaterGauge(
-            name="synapse_federation_transaction_queue_pending_destinations",
-            desc="",
-            labelnames=[SERVER_NAME_LABEL],
-            caller=lambda: {
+        transaction_queue_pending_destinations_gauge.register_hook(
+            homeserver_instance_id=hs.get_instance_id(),
+            hook=lambda: {
                 (self.server_name,): sum(
                     1
                     for d in self._per_destination_queues.values()
@@ -410,22 +432,17 @@ class FederationSender(AbstractFederationSender):
                 )
             },
         )
-
-        LaterGauge(
-            name="synapse_federation_transaction_queue_pending_pdus",
-            desc="",
-            labelnames=[SERVER_NAME_LABEL],
-            caller=lambda: {
+        transaction_queue_pending_pdus_gauge.register_hook(
+            homeserver_instance_id=hs.get_instance_id(),
+            hook=lambda: {
                 (self.server_name,): sum(
                     d.pending_pdu_count() for d in self._per_destination_queues.values()
                 )
             },
         )
-        LaterGauge(
-            name="synapse_federation_transaction_queue_pending_edus",
-            desc="",
-            labelnames=[SERVER_NAME_LABEL],
-            caller=lambda: {
+        transaction_queue_pending_edus_gauge.register_hook(
+            homeserver_instance_id=hs.get_instance_id(),
+            hook=lambda: {
                 (self.server_name,): sum(
                     d.pending_edu_count() for d in self._per_destination_queues.values()
                 )
@@ -441,18 +458,30 @@ class FederationSender(AbstractFederationSender):
             1.0 / hs.config.ratelimiting.federation_rr_transactions_per_room_per_second
         )
         self._destination_wakeup_queue = _DestinationWakeupQueue(
-            self, self.server_name, self.clock, max_delay_s=rr_txn_interval_per_room_s
+            self,
+            hs,
+            self.server_name,
+            self.clock,
+            max_delay_s=rr_txn_interval_per_room_s,
         )

+        # It is important for `_is_shutdown` to be instantiated before the looping call
+        # for `wake_destinations_needing_catchup`.
+        self._is_shutdown = False
+
         # Regularly wake up destinations that have outstanding PDUs to be caught up
         self.clock.looping_call_now(
-            run_as_background_process,
+            self.hs.run_as_background_process,
             WAKEUP_RETRY_PERIOD_SEC * 1000.0,
             "wake_destinations_needing_catchup",
-            self.server_name,
             self._wake_destinations_needing_catchup,
         )

+    def shutdown(self) -> None:
+        self._is_shutdown = True
+        for queue in self._per_destination_queues.values():
+            queue.shutdown()
+
     def _get_per_destination_queue(
         self, destination: str
     ) -> Optional[PerDestinationQueue]:
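The ordering comment in this hunk is load-bearing: `looping_call_now` runs its first iteration immediately, so `_is_shutdown` must already exist by the time `_wake_destinations_needing_catchup` first reads it. A minimal sketch of the same flag-before-loop pattern, using a thread-based ticker as a stand-in for Synapse's clock machinery (all names here are illustrative):

import threading
from typing import Callable


class Ticker:
    """Stand-in for looping_call_now: fires immediately, then on an interval."""

    def __init__(self, interval_s: float, fn: Callable[[], None]) -> None:
        self._fn = fn
        self._interval_s = interval_s
        self._stop = threading.Event()
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def _run(self) -> None:
        while not self._stop.is_set():
            self._fn()  # the first call happens straight away
            self._stop.wait(self._interval_s)

    def stop(self) -> None:
        self._stop.set()


class SenderSketch:
    def __init__(self) -> None:
        # Must be assigned *before* the ticker starts: the first tick runs
        # immediately and reads this attribute.
        self._is_shutdown = False
        self._ticker = Ticker(0.05, self._wake_destinations)

    def _wake_destinations(self) -> None:
        if self._is_shutdown:
            return
        # ... wake destinations needing catch-up ...

    def shutdown(self) -> None:
        self._is_shutdown = True
        self._ticker.stop()


sender = SenderSketch()
sender.shutdown()  # safe at any point, including right after construction

Swapping the two assignments in `__init__` would leave the very first tick reading an attribute that does not exist yet.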
@@ -491,16 +520,15 @@ class FederationSender(AbstractFederationSender):
             return

         # fire off a processing loop in the background
-        run_as_background_process(
+        self.hs.run_as_background_process(
             "process_event_queue_for_federation",
-            self.server_name,
             self._process_event_queue_loop,
         )

     async def _process_event_queue_loop(self) -> None:
         try:
             self._is_processing = True
-            while True:
+            while not self._is_shutdown:
                 last_token = await self.store.get_federation_out_pos("events")
                 (
                     next_token,
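Both call sites in this hunk drop the explicit `self.server_name` argument when moving from the module-level `run_as_background_process` to the method on `hs`, which suggests the `HomeServer` method binds the server name itself. A hedged sketch of that wrapper shape (the real signatures live in `synapse.metrics.background_process_metrics` and `synapse.server`; `HomeServerSketch` is a stand-in):

from typing import Any, Awaitable, Callable


def run_as_background_process(
    desc: str, server_name: str, func: Callable[..., Awaitable[Any]], *args: Any
) -> Any:
    """Module-level form: every caller has to thread server_name through."""
    ...


class HomeServerSketch:
    def __init__(self, hostname: str) -> None:
        self.hostname = hostname

    def run_as_background_process(
        self, desc: str, func: Callable[..., Awaitable[Any]], *args: Any
    ) -> Any:
        # Binds this homeserver's name once, so call sites shrink from
        # (desc, server_name, func) to (desc, func).
        return run_as_background_process(desc, self.hostname, func, *args)

The `while True:` to `while not self._is_shutdown:` change in the same hunk is the consumer side of the new `shutdown()` flag: the event-queue loop now exits cleanly between iterations instead of running forever.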
@@ -644,6 +672,31 @@ class FederationSender(AbstractFederationSender):
                 )
                 return

+            # If we've rescinded an invite then we want to tell the
+            # other server.
+            if (
+                event.type == EventTypes.Member
+                and event.membership == Membership.LEAVE
+                and event.sender != event.state_key
+            ):
+                # We check if this leave event is rescinding an invite
+                # by looking if there is an invite event for the user in
+                # the auth events. It could otherwise be a kick or
+                # unban, which we don't want to send (if the user wasn't
+                # already in the room).
+                auth_events = await self.store.get_events_as_list(
+                    event.auth_event_ids()
+                )
+                for auth_event in auth_events:
+                    if (
+                        auth_event.type == EventTypes.Member
+                        and auth_event.state_key == event.state_key
+                        and auth_event.membership == Membership.INVITE
+                    ):
+                        destinations = set(destinations)
+                        destinations.add(get_domain_from_id(event.state_key))
+                        break
+
             sharded_destinations = {
                 d
                 for d in destinations
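The added block distinguishes a rescinded invite from a kick or an unban by consulting the leave event's auth chain: only when the auth events contain an invite for the same `state_key` is the invited user's server added to the destinations. The predicate can be isolated as a sketch, with `Ev` as a hypothetical stand-in for `EventBase`:

from dataclasses import dataclass, field
from typing import List

MEMBER = "m.room.member"


@dataclass
class Ev:
    """Hypothetical stand-in for synapse.events.EventBase."""

    type: str
    sender: str
    state_key: str
    membership: str
    auth_events: List["Ev"] = field(default_factory=list)


def rescinds_invite(event: Ev) -> bool:
    # A leave written by someone other than the affected user...
    if not (
        event.type == MEMBER
        and event.membership == "leave"
        and event.sender != event.state_key
    ):
        return False
    # ...counts as a rescinded invite only if the auth events show a pending
    # invite; otherwise it could be a kick or an unban, which should not be
    # broadcast to a server whose user was never in the room.
    return any(
        a.type == MEMBER
        and a.state_key == event.state_key
        and a.membership == "invite"
        for a in event.auth_events
    )


invite = Ev(MEMBER, "@alice:a.example", "@bob:b.example", "invite")
leave = Ev(MEMBER, "@alice:a.example", "@bob:b.example", "leave", [invite])
assert rescinds_invite(leave)

In the real code the extra destination is `get_domain_from_id(event.state_key)`, i.e. `b.example` in this example.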
@@ -1086,7 +1139,7 @@ class FederationSender(AbstractFederationSender):

         last_processed: Optional[str] = None

-        while True:
+        while not self._is_shutdown:
             destinations_to_wake = (
                 await self.store.get_catch_up_outstanding_destinations(last_processed)
             )

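`_wake_destinations_needing_catchup` pages through outstanding destinations with `last_processed` as a resumption token, and the new loop condition lets `shutdown()` interrupt it between batches. The paging pattern, sketched with an in-memory stand-in for the store call (batch size and data are illustrative):

from typing import List, Optional

DESTINATIONS = sorted(["a.example", "b.example", "c.example", "d.example"])


def get_catch_up_outstanding_destinations(
    after: Optional[str], batch_size: int = 2
) -> List[str]:
    """Stand-in for the store call: the next destinations after `after`."""
    start = 0 if after is None else DESTINATIONS.index(after) + 1
    return DESTINATIONS[start : start + batch_size]


is_shutdown = False
last_processed: Optional[str] = None
woken: List[str] = []

while not is_shutdown:
    batch = get_catch_up_outstanding_destinations(last_processed)
    if not batch:
        break
    woken.extend(batch)  # real loop: wake each destination, rate-limited
    last_processed = batch[-1]

assert woken == DESTINATIONS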