Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-09 01:30:18 +00:00)

# Compare commits: travis/adm...erikj/fast (110 commits)
Commits in this range (SHA1):

9db46d7608, a6e326582f, cd339d52b6, e7348406a3, 4a01e2df47, 2465659942, 501b96134c, f8887a64e4, 8551e0f0af, 25289b6444,
86370979d9, 664f0e8938, ea87853188, caf5f0110e, a31d53b28f, 16a639e0fe, a2ba909ded, c823d2e98a, 7ae7468159, d4af2970f3,
31a38f57f5, 5b8b45a16d, 3d683350e9, 106afe4984, 5106818bd0, f13a136396, 2c236be058, 458e6410e8, 1dd5f68251, 8344c944b1,
b34342eedf, 61e79a4cdf, b7e7f537f1, 8fb9c105c9, a82b8a966a, f5f2c9587e, 0be7fe926d, 98f84256e9, 15b927ffab, 7fa88d6d07,
9ecf192089, 6838a1020b, a77befcc29, cedb8cd045, bb84121553, 7de4fdf61a, 8fc9aa70a5, 3db73b974f, c51bd89c3b, 7de9ac01a0,
4e118aecd0, 11a11414c5, 8a4e2e826d, 875269eb53, 56f5097d1c, 797fa5728d, c58d7ade38, 6127aa0d50, 5c1d31816b, 5ea2cf2484,
66504d1144, cda922830e, f0f9a82ca4, f031105eee, a0d6469069, 84991317d0, 56c166cbf0, a07e26a936, b07dc6a27d, 42297bfceb,
88785dbaeb, 3563138b5e, f3e5503f10, 1a89d0f87e, c0d46dea26, 20d344a96d, fc10a5ee29, d72c278a07, b274d6561c, 49cb78376e,
88f38ea149, 5f027adb33, e6dbbbb315, 78ce4dc26f, 60be549c0c, 97d2738eef, 945e22303c, 481c4e2b55, 5129668449, 3c13c3bebf,
1e5e6a48be, 947216abc0, c5999cf452, 28c9ed3ccb, 1dc29563c1, 66daf0bfae, b9b8775db7, e1b429d88e, 8c1e60045c, bf0370162f,
5c2765bd7a, f8a7872ddb, b8ad9bf1b7, 9b86458900, 57220706b2, 616ada3dfe, 15a702c952, efe57acf31, 6ef2f93a08, d80c397c3a
## .github/workflows/docker.yml (vendored, 140 lines changed)
```diff
@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [ master, main, develop ]
+    branches: [master, main, develop]
   workflow_dispatch:

 permissions:
@@ -14,24 +14,22 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token

 jobs:
   build:
-    runs-on: ubuntu-22.04
+    name: Build and push image for ${{ matrix.platform }}
+    runs-on: ${{ matrix.runs_on }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runs_on: ubuntu-24.04
+            suffix: linux-amd64
+          - platform: linux/arm64
+            runs_on: ubuntu-24.04-arm
+            suffix: linux-arm64
     steps:
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
-        with:
-          platforms: arm64
-
       - name: Set up Docker Buildx
         id: buildx
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

-      - name: Inspect builder
-        run: docker buildx inspect
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac # v3.9.1
-
       - name: Checkout repository
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -55,13 +53,79 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Calculate docker image tag
-        id: set-tag
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+      - name: Build and push by digest
+        id: build
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
-          images: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          file: "docker/Dockerfile"
+          platforms: ${{ matrix.platform }}
+          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
+
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.suffix }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1
+
+  merge:
+    name: Push merged images to ${{ matrix.repository }}
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        repository:
+          - docker.io/matrixdotorg/synapse
+          - ghcr.io/element-hq/synapse
+
+    needs:
+      - build
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-*
+          merge-multiple: true
+
+      - name: Log in to DockerHub
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ startsWith(matrix.repository, 'docker.io') }}
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Log in to GHCR
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
+
+      - name: Calculate docker image tag
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        with:
+          images: ${{ matrix.repository }}
           flavor: |
             latest=false
           tags: |
@@ -69,31 +133,23 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
             type=sha

-      - name: Build and push all platforms
-        id: build-and-push
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: "${{ steps.set-tag.outputs.tags }}"
-          file: "docker/Dockerfile"
-          platforms: linux/amd64,linux/arm64
-
-          # arm64 builds OOM without the git fetch setting. c.f.
-          # https://github.com/rust-lang/cargo/issues/10583
-          build-args: |
-            CARGO_NET_GIT_FETCH_WITH_CLI=true
-
-      - name: Sign the images with GitHub OIDC Token
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
         env:
-          DIGEST: ${{ steps.build-and-push.outputs.digest }}
-          TAGS: ${{ steps.set-tag.outputs.tags }}
+          REPOSITORY: ${{ matrix.repository }}
         run: |
-          images=""
-          for tag in ${TAGS}; do
-            images+="${tag}@${DIGEST} "
-          done
-          cosign sign --yes ${images}
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf "$REPOSITORY@sha256:%s " *)
+
+      - name: Sign each manifest
+        env:
+          REPOSITORY: ${{ matrix.repository }}
+        run: |
+          DIGESTS=""
+          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
+            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
+            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          done
+          cosign sign --yes $DIGESTS
```
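Because the `merge` job signs each manifest keylessly with the GitHub OIDC token, consumers can check the signature against the workflow identity. A verification sketch assuming cosign v2.x; the tag below is an example, not a pinned release:

```sh
# Verify a manifest signed by the workflow's keyless (GitHub OIDC) flow.
cosign verify \
  --certificate-oidc-issuer 'https://token.actions.githubusercontent.com' \
  --certificate-identity-regexp 'https://github.com/element-hq/synapse/' \
  ghcr.io/element-hq/synapse:v1.135.0
```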
## .github/workflows/latest_deps.yml (vendored, 2 lines changed)
```diff
@@ -60,7 +60,7 @@ jobs:
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --no-dev
+      - run: poetry update --without dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
```
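Poetry 2.x removed the deprecated `--no-dev` flag; `--without dev` is the dependency-group equivalent. Run by hand, the updated step amounts to the following (a sketch assuming a Synapse checkout with Poetry 2.x installed):

```sh
poetry run pip list > before.txt
poetry update --without dev    # upgrade runtime dependency groups only
poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
```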
## .github/workflows/release-artifacts.yml (vendored, 40 lines changed)
```diff
@@ -30,7 +30,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.x'
+          python-version: "3.x"
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -76,7 +76,7 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.x'
+          python-version: "3.x"

       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -107,12 +107,15 @@ jobs:
           path: debs/*

   build-wheels:
-    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
+    name: Build wheels on ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-24.04, macos-13]
-        arch: [x86_64, aarch64]
+        os:
+          - ubuntu-24.04
+          - ubuntu-24.04-arm
+          - macos-13 # This uses x86-64
+          - macos-14 # This uses arm64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -122,12 +125,11 @@ jobs:
           # Don't build macos wheels on PR CI.
           - is_pr: true
             os: "macos-13"
-          # Don't build aarch64 wheels on mac.
-          - os: "macos-13"
-            arch: aarch64
+          - is_pr: true
+            os: "macos-14"
           # Don't build aarch64 wheels on PR CI.
           - is_pr: true
-            arch: aarch64
+            os: "ubuntu-24.04-arm"

     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -141,19 +143,9 @@ jobs:
       - name: Install cibuildwheel
         run: python -m pip install cibuildwheel==3.0.0

-      - name: Set up QEMU to emulate aarch64
-        if: matrix.arch == 'aarch64'
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
-        with:
-          platforms: arm64
-
-      - name: Build aarch64 wheels
-        if: matrix.arch == 'aarch64'
-        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
-
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV

       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
@@ -161,13 +153,10 @@ jobs:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
           CIBW_TEST_SKIP: pp3*-* *i686* *musl*
-          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
-          CARGO_NET_GIT_FETCH_WITH_CLI: true
-          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI

       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
+          name: Wheel-${{ matrix.os }}
           path: ./wheelhouse/*.whl

   build-sdist:
@@ -179,7 +168,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.10'
+          python-version: "3.10"

       - run: pip install build

@@ -191,7 +180,6 @@ jobs:
           name: Sdist
           path: dist/*.tar.gz

-
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
```
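With the `arch` axis gone, the PR fast path relies on the runner's native architecture: `cp39-manylinux_*` expands to an x86_64 wheel on `ubuntu-24.04` and an aarch64 wheel on `ubuntu-24.04-arm`, with no QEMU emulation. A local sketch of the same invocation (assuming cibuildwheel 3.0.0 and Docker available):

```sh
# Build only the CPython 3.9 manylinux wheel for the host architecture.
export CIBW_BUILD='cp39-manylinux_*'
python -m cibuildwheel --output-dir wheelhouse
```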
## .github/workflows/tests.yml (vendored, 42 lines changed)
```diff
@@ -263,6 +263,43 @@ jobs:

       - run: cargo clippy --all-features -- -D warnings

+  lint-rust:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.rust == 'true' }}
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        with:
+          # Install like a normal project from source with all optional dependencies
+          extras: all
+          install-project: "true"
+          poetry-version: "2.1.1"
+
+      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
+        # The `::error::` syntax is using GitHub Actions' error annotations, see
+        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
+        run: |
+          if git diff --quiet Cargo.lock; then
+            echo "Cargo.lock is up to date"
+          else
+            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
+            git diff --exit-code Cargo.lock
+            exit 1
+          fi
+
+  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
+  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
@@ -274,7 +311,8 @@ jobs:
       - name: Install Rust
         uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
-          # We use nightly so that it correctly groups together imports
+          # We use nightly so that we can use some unstable options that we use in
+          # `.rustfmt.toml`.
           toolchain: nightly-2025-04-23
           components: rustfmt
       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -309,6 +347,7 @@ jobs:
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
+      - lint-rust
       - lint-rustfmt
       - lint-readme
     runs-on: ubuntu-latest
@@ -327,6 +366,7 @@ jobs:
           lint-pydantic
           lint-clippy
           lint-clippy-nightly
+          lint-rust
           lint-rustfmt
           lint-readme
```
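The freshness check can be reproduced locally; a sketch assuming a Synapse checkout with the same Poetry setup as CI:

```sh
# Install the project (this regenerates Cargo.lock if it is stale) ...
poetry install --extras all
# ... then fail loudly if the lockfile drifted from what is committed.
git diff --exit-code Cargo.lock
```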
## .gitignore (vendored, 1 line changed)
```diff
@@ -47,6 +47,7 @@ __pycache__/
 /.idea/
 /.ropeproject/
 /.vscode/
+/.zed/

 # build products
 !/.coveragerc
```
## .rustfmt.toml (6 lines changed; file name inferred from the content and the `lint-rustfmt` comment above)

```diff
@@ -1 +1,6 @@
+# Unstable options are only available on a nightly toolchain and must be opted into
+unstable_features = true
+
+# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
+# https://github.com/rust-lang/rustfmt/issues/5083
 group_imports = "StdExternalCrate"
```
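Since both options are nightly-only, a plain `cargo fmt` on stable will not apply them; a usage sketch pinned to the same toolchain as CI (a local rustup install is assumed):

```sh
# Install the nightly toolchain pinned in tests.yml, then check formatting
# with the unstable .rustfmt.toml options active.
rustup toolchain install nightly-2025-04-23 --component rustfmt
cargo +nightly-2025-04-23 fmt --all -- --check
```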
## CHANGES.md (165 lines changed; new release notes prepended at `@@ -1,3 +1,168 @@`)

```markdown
# Synapse 1.135.0rc2 (2025-07-30)

### Bugfixes

- Fix user failing to deactivate with MAS when `/_synapse/mas` is handled by a worker. ([\#18716](https://github.com/element-hq/synapse/issues/18716))

### Internal Changes

- Fix performance regression introduced in [#18238](https://github.com/element-hq/synapse/issues/18238) by adding a cache to `is_server_admin`. ([\#18747](https://github.com/element-hq/synapse/issues/18747))


# Synapse 1.135.0rc1 (2025-07-22)

### Features

- Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option. ([\#17984](https://github.com/element-hq/synapse/issues/17984), [\#18684](https://github.com/element-hq/synapse/issues/18684))
- Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765). ([\#18195](https://github.com/element-hq/synapse/issues/18195))
- If enabled by the user, server admins will see [soft failed](https://spec.matrix.org/v1.13/server-server-api/#soft-failure) events over the Client-Server API. ([\#18238](https://github.com/element-hq/synapse/issues/18238))
- Add experimental support for [MSC4277: Harmonizing the reporting endpoints](https://github.com/matrix-org/matrix-spec-proposals/pull/4277). ([\#18263](https://github.com/element-hq/synapse/issues/18263))
- Add ability to limit amount of media uploaded by a user in a given time period. ([\#18527](https://github.com/element-hq/synapse/issues/18527))
- Enable workers to write directly to the device lists stream and handle device list updates, reducing load on the main process. ([\#18581](https://github.com/element-hq/synapse/issues/18581))
- Support arbitrary profile fields. Contributed by @clokep. ([\#18635](https://github.com/element-hq/synapse/issues/18635))
- Advertise support for Matrix v1.12. ([\#18647](https://github.com/element-hq/synapse/issues/18647))
- Add an option to issue redactions as an admin user via the [admin redaction endpoint](https://element-hq.github.io/synapse/latest/admin_api/user_admin_api.html#redact-all-the-events-of-a-user). ([\#18671](https://github.com/element-hq/synapse/issues/18671))
- Add experimental and incomplete support for [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/blob/rei/msc_thread_subscriptions/proposals/4306-thread-subscriptions.md). ([\#18674](https://github.com/element-hq/synapse/issues/18674))
- Include `event_id` when getting state with `?format=event`. Contributed by @tulir @ Beeper. ([\#18675](https://github.com/element-hq/synapse/issues/18675))

### Bugfixes

- Fix CPU and database spinning when retrying sending events to servers whilst at the same time purging those events. ([\#18499](https://github.com/element-hq/synapse/issues/18499))
- Don't allow creation of tags with names longer than 255 bytes, [as per the spec](https://spec.matrix.org/v1.15/client-server-api/#events-14). ([\#18660](https://github.com/element-hq/synapse/issues/18660))
- Fix `sliding_sync_connections`-related errors when porting from SQLite to Postgres. ([\#18677](https://github.com/element-hq/synapse/issues/18677))
- Fix the MAS integration not working when Synapse is started with `--daemonize` or using `synctl`. ([\#18691](https://github.com/element-hq/synapse/issues/18691))

### Improved Documentation

- Document that some config options for the user directory are in violation of the Matrix spec. ([\#18548](https://github.com/element-hq/synapse/issues/18548))
- Update `rc_delayed_event_mgmt` docs to the actual nesting level. Contributed by @HarHarLinks. ([\#18692](https://github.com/element-hq/synapse/issues/18692))

### Internal Changes

- Add a dedicated internal API for Matrix Authentication Service to Synapse communication. ([\#18520](https://github.com/element-hq/synapse/issues/18520))
- Allow user registrations to be done on workers. ([\#18552](https://github.com/element-hq/synapse/issues/18552))
- Remove unnecessary HTTP replication calls. ([\#18564](https://github.com/element-hq/synapse/issues/18564))
- Refactor `Measure` block metrics to be homeserver-scoped. ([\#18601](https://github.com/element-hq/synapse/issues/18601))
- Refactor cache metrics to be homeserver-scoped. ([\#18604](https://github.com/element-hq/synapse/issues/18604))
- Unbreak "Latest dependencies" workflow by using the `--without dev` poetry option instead of removed `--no-dev`. ([\#18617](https://github.com/element-hq/synapse/issues/18617))
- Update URL Preview code to work with `lxml` 6.0.0+. ([\#18622](https://github.com/element-hq/synapse/issues/18622))
- Use `markdown-it-py` instead of `commonmark` in the release script. ([\#18637](https://github.com/element-hq/synapse/issues/18637))
- Fix typing errors with upgraded mypy version. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
- Add doc comment explaining that config files are shallowly merged. ([\#18664](https://github.com/element-hq/synapse/issues/18664))
- Minor speed up of insertion into `stream_positions` table. ([\#18672](https://github.com/element-hq/synapse/issues/18672))
- Remove unused `allow_no_prev_events` option when creating an event. ([\#18676](https://github.com/element-hq/synapse/issues/18676))
- Clean up `MetricsResource` and Prometheus hacks. ([\#18687](https://github.com/element-hq/synapse/issues/18687))
- Fix dirty `Cargo.lock` changes appearing after install (`base64`). ([\#18689](https://github.com/element-hq/synapse/issues/18689))
- Prevent dirty `Cargo.lock` changes from install. ([\#18693](https://github.com/element-hq/synapse/issues/18693))
- Correct spelling of 'Admin token used' log line. ([\#18697](https://github.com/element-hq/synapse/issues/18697))
- Reduce log spam when client stops downloading media while it is being streamed to them. ([\#18699](https://github.com/element-hq/synapse/issues/18699))

### Updates to locked dependencies

* Bump authlib from 1.6.0 to 1.6.1. ([\#18704](https://github.com/element-hq/synapse/issues/18704))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18666](https://github.com/element-hq/synapse/issues/18666))
* Bump jsonschema from 4.24.0 to 4.25.0. ([\#18707](https://github.com/element-hq/synapse/issues/18707))
* Bump lxml from 5.4.0 to 6.0.0. ([\#18631](https://github.com/element-hq/synapse/issues/18631))
* Bump mypy from 1.13.0 to 1.16.1. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
* Bump once_cell from 1.19.0 to 1.21.3. ([\#18710](https://github.com/element-hq/synapse/issues/18710))
* Bump phonenumbers from 9.0.8 to 9.0.9. ([\#18681](https://github.com/element-hq/synapse/issues/18681))
* Bump ruff from 0.12.2 to 0.12.5. ([\#18683](https://github.com/element-hq/synapse/issues/18683), [\#18705](https://github.com/element-hq/synapse/issues/18705))
* Bump serde_json from 1.0.140 to 1.0.141. ([\#18709](https://github.com/element-hq/synapse/issues/18709))
* Bump sigstore/cosign-installer from 3.9.1 to 3.9.2. ([\#18708](https://github.com/element-hq/synapse/issues/18708))
* Bump types-jsonschema from 4.24.0.20250528 to 4.24.0.20250708. ([\#18682](https://github.com/element-hq/synapse/issues/18682))

# Synapse 1.134.0 (2025-07-15)

No significant changes since 1.134.0rc1.


# Synapse 1.134.0rc1 (2025-07-09)

### Features

- Support for [MSC4235](https://github.com/matrix-org/matrix-spec-proposals/pull/4235): `via` query param for hierarchy endpoint. Contributed by Krishan (@kfiven). ([\#18070](https://github.com/element-hq/synapse/issues/18070))
- Add `forget_forced_upon_leave` capability as per [MSC4267](https://github.com/matrix-org/matrix-spec-proposals/pull/4267). ([\#18196](https://github.com/element-hq/synapse/issues/18196))
- Add `federated_user_may_invite` spam checker callback which receives the entire invite event. Contributed by @tulir @ Beeper. ([\#18241](https://github.com/element-hq/synapse/issues/18241))

### Bugfixes

- Fix `KeyError` on background updates when using split main/state databases. ([\#18509](https://github.com/element-hq/synapse/issues/18509))
- Improve performance of device deletion by adding missing index. ([\#18582](https://github.com/element-hq/synapse/issues/18582))
- Fix `avatar_url` and `displayname` being sent on federation profile queries when they are not set. ([\#18593](https://github.com/element-hq/synapse/issues/18593))
- Respond with 401 & `M_USER_LOCKED` when a locked user calls `POST /login`, as per the spec. ([\#18594](https://github.com/element-hq/synapse/issues/18594))
- Ensure policy servers are not asked to scan policy server change events, allowing rooms to disable the use of a policy server while the policy server is down. ([\#18605](https://github.com/element-hq/synapse/issues/18605))

### Improved Documentation

- Fix documentation of the Delete Room Admin API's status field. ([\#18519](https://github.com/element-hq/synapse/issues/18519))

### Deprecations and Removals

- Stop adding the "origin" field to newly-created events (PDUs). ([\#18418](https://github.com/element-hq/synapse/issues/18418))

### Internal Changes

- Replace `PyICU` crate with equivalent `icu_segmenter` Rust crate. ([\#18553](https://github.com/element-hq/synapse/issues/18553), [\#18646](https://github.com/element-hq/synapse/issues/18646))
- Improve docstring on `simple_upsert_many`. ([\#18573](https://github.com/element-hq/synapse/issues/18573))
- Raise poetry-core version cap to 2.1.3. ([\#18575](https://github.com/element-hq/synapse/issues/18575))
- Raise setuptools_rust version cap to 1.11.1. ([\#18576](https://github.com/element-hq/synapse/issues/18576))
- Better handling of ratelimited requests. ([\#18595](https://github.com/element-hq/synapse/issues/18595), [\#18600](https://github.com/element-hq/synapse/issues/18600))
- Update to Rust 1.87.0 in CI, and bump the pinned commit of the `dtolnay/rust-toolchain` GitHub Action to `b3b07ba8b418998c39fb20f53e8b695cdcc8de1b`. ([\#18596](https://github.com/element-hq/synapse/issues/18596))
- Speed up bulk device deletion. ([\#18602](https://github.com/element-hq/synapse/issues/18602))
- Speed up the building of arm-based wheels in CI. ([\#18618](https://github.com/element-hq/synapse/issues/18618))
- Speed up the building of Docker images in CI. ([\#18620](https://github.com/element-hq/synapse/issues/18620))
- Add `.zed/` directory to `.gitignore`. ([\#18623](https://github.com/element-hq/synapse/issues/18623))
- Log the room ID we're purging state for. ([\#18625](https://github.com/element-hq/synapse/issues/18625))

### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.7.8 to 2.8.0. ([\#18612](https://github.com/element-hq/synapse/issues/18612))
* Bump attrs from 24.2.0 to 25.3.0. ([\#18649](https://github.com/element-hq/synapse/issues/18649))
* Bump authlib from 1.5.2 to 1.6.0. ([\#18642](https://github.com/element-hq/synapse/issues/18642))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18589](https://github.com/element-hq/synapse/issues/18589))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18629](https://github.com/element-hq/synapse/issues/18629))
* Bump docker/build-push-action from 6.17.0 to 6.18.0. ([\#18497](https://github.com/element-hq/synapse/issues/18497))
* Bump docker/setup-buildx-action from 3.10.0 to 3.11.1. ([\#18587](https://github.com/element-hq/synapse/issues/18587))
* Bump hiredis from 3.1.0 to 3.2.1. ([\#18638](https://github.com/element-hq/synapse/issues/18638))
* Bump ijson from 3.3.0 to 3.4.0. ([\#18650](https://github.com/element-hq/synapse/issues/18650))
* Bump jsonschema from 4.23.0 to 4.24.0. ([\#18630](https://github.com/element-hq/synapse/issues/18630))
* Bump msgpack from 1.1.0 to 1.1.1. ([\#18651](https://github.com/element-hq/synapse/issues/18651))
* Bump mypy-zope from 1.0.11 to 1.0.12. ([\#18640](https://github.com/element-hq/synapse/issues/18640))
* Bump phonenumbers from 9.0.2 to 9.0.8. ([\#18652](https://github.com/element-hq/synapse/issues/18652))
* Bump pillow from 11.2.1 to 11.3.0. ([\#18624](https://github.com/element-hq/synapse/issues/18624))
* Bump prometheus-client from 0.21.0 to 0.22.1. ([\#18609](https://github.com/element-hq/synapse/issues/18609))
* Bump pyasn1-modules from 0.4.1 to 0.4.2. ([\#18495](https://github.com/element-hq/synapse/issues/18495))
* Bump pydantic from 2.11.4 to 2.11.7. ([\#18639](https://github.com/element-hq/synapse/issues/18639))
* Bump reqwest from 0.12.15 to 0.12.20. ([\#18590](https://github.com/element-hq/synapse/issues/18590))
* Bump reqwest from 0.12.20 to 0.12.22. ([\#18627](https://github.com/element-hq/synapse/issues/18627))
* Bump ruff from 0.11.11 to 0.12.1. ([\#18645](https://github.com/element-hq/synapse/issues/18645))
* Bump ruff from 0.12.1 to 0.12.2. ([\#18657](https://github.com/element-hq/synapse/issues/18657))
* Bump sentry-sdk from 2.22.0 to 2.32.0. ([\#18633](https://github.com/element-hq/synapse/issues/18633))
* Bump setuptools-rust from 1.10.2 to 1.11.1. ([\#18655](https://github.com/element-hq/synapse/issues/18655))
* Bump sigstore/cosign-installer from 3.8.2 to 3.9.0. ([\#18588](https://github.com/element-hq/synapse/issues/18588))
* Bump sigstore/cosign-installer from 3.9.0 to 3.9.1. ([\#18608](https://github.com/element-hq/synapse/issues/18608))
* Bump stefanzweifel/git-auto-commit-action from 5.2.0 to 6.0.1. ([\#18607](https://github.com/element-hq/synapse/issues/18607))
* Bump tokio from 1.45.1 to 1.46.0. ([\#18628](https://github.com/element-hq/synapse/issues/18628))
* Bump tokio from 1.46.0 to 1.46.1. ([\#18667](https://github.com/element-hq/synapse/issues/18667))
* Bump treq from 24.9.1 to 25.5.0. ([\#18610](https://github.com/element-hq/synapse/issues/18610))
* Bump types-bleach from 6.2.0.20241123 to 6.2.0.20250514. ([\#18634](https://github.com/element-hq/synapse/issues/18634))
* Bump types-jsonschema from 4.23.0.20250516 to 4.24.0.20250528. ([\#18611](https://github.com/element-hq/synapse/issues/18611))
* Bump types-opentracing from 2.4.10.6 to 2.4.10.20250622. ([\#18586](https://github.com/element-hq/synapse/issues/18586))
* Bump types-psycopg2 from 2.9.21.20250318 to 2.9.21.20250516. ([\#18658](https://github.com/element-hq/synapse/issues/18658))
* Bump types-pyyaml from 6.0.12.20241230 to 6.0.12.20250516. ([\#18643](https://github.com/element-hq/synapse/issues/18643))
* Bump types-setuptools from 75.2.0.20241019 to 80.9.0.20250529. ([\#18644](https://github.com/element-hq/synapse/issues/18644))
* Bump typing-extensions from 4.12.2 to 4.14.0. ([\#18654](https://github.com/element-hq/synapse/issues/18654))
* Bump typing-extensions from 4.14.0 to 4.14.1. ([\#18668](https://github.com/element-hq/synapse/issues/18668))
* Bump urllib3 from 2.2.2 to 2.5.0. ([\#18572](https://github.com/element-hq/synapse/issues/18572))

# Synapse 1.133.0 (2025-07-01)

Pre-built wheels are now built using the [manylinux_2_28](https://github.com/pypa/manylinux#manylinux_2_28-almalinux-8-based) base, which is expected to be compatible with distros using glibc 2.28 or later, including:
```
## Cargo.lock (generated, 47 lines changed)
```diff
@@ -65,12 +65,6 @@ dependencies = [
  "windows-targets",
 ]

-[[package]]
-name = "base64"
-version = "0.21.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
-
 [[package]]
 name = "base64"
 version = "0.22.1"
@@ -380,7 +374,7 @@ version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb"
 dependencies = [
- "base64 0.22.1",
+ "base64",
  "bytes",
  "headers-core",
  "http",
@@ -500,7 +494,7 @@ version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
 dependencies = [
- "base64 0.22.1",
+ "base64",
  "bytes",
  "futures-channel",
  "futures-core",
@@ -512,7 +506,7 @@ dependencies = [
  "libc",
  "percent-encoding",
  "pin-project-lite",
- "socket2",
+ "socket2 0.5.9",
  "tokio",
  "tower-service",
  "tracing",
@@ -893,9 +887,9 @@ dependencies = [

 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.21.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"

 [[package]]
 name = "openssl-probe"
@@ -1048,7 +1042,7 @@ dependencies = [
  "quinn-udp",
  "rustc-hash",
  "rustls",
- "socket2",
+ "socket2 0.5.9",
  "thiserror",
  "tokio",
  "tracing",
@@ -1080,7 +1074,7 @@ dependencies = [
  "cfg_aliases",
  "libc",
  "once_cell",
- "socket2",
+ "socket2 0.5.9",
  "tracing",
  "windows-sys 0.59.0",
 ]
@@ -1190,7 +1184,7 @@ version = "0.12.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
 dependencies = [
- "base64 0.22.1",
+ "base64",
  "bytes",
  "futures-core",
  "futures-util",
@@ -1361,9 +1355,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.140"
+version = "1.0.141"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
+checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
 dependencies = [
  "itoa",
  "memchr",
@@ -1436,6 +1430,16 @@ dependencies = [
  "windows-sys 0.52.0",
 ]

+[[package]]
+name = "socket2"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
+dependencies = [
+ "libc",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "stable_deref_trait"
 version = "1.2.0"
@@ -1464,7 +1468,7 @@ name = "synapse"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "base64 0.21.7",
+ "base64",
  "blake2",
  "bytes",
  "futures",
@@ -1476,6 +1480,7 @@ dependencies = [
  "lazy_static",
  "log",
  "mime",
+ "once_cell",
  "pyo3",
  "pyo3-log",
  "pythonize",
@@ -1571,9 +1576,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "tokio"
-version = "1.46.0"
+version = "1.47.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1140bb80481756a8cbe10541f37433b459c5aa1e727b4c020fbfebdc25bf3ec4"
+checksum = "43864ed400b6043a4757a25c7a64a8efde741aed79a056a2fb348a406701bb35"
 dependencies = [
  "backtrace",
  "bytes",
@@ -1582,8 +1587,8 @@ dependencies = [
  "mio",
  "pin-project-lite",
  "slab",
- "socket2",
- "windows-sys 0.52.0",
+ "socket2 0.6.0",
+ "windows-sys 0.59.0",
 ]

 [[package]]
```
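With two incompatible `socket2` majors now coexisting, `Cargo.lock` disambiguates bare dependency names into `socket2 0.5.9` and `socket2 0.6.0`. Which dependents hold which version can be inspected with stock cargo (a checkout of the repo is the only assumption):

```sh
cargo tree --invert socket2@0.5.9   # dependents still on the 0.5 line (hyper, quinn, ...)
cargo tree --invert socket2@0.6.0   # dependents on 0.6 (tokio, per the diff above)
```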
## README.rst (29 lines changed)
|
||||
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
|
||||
implementation, written and maintained by `Element <https://element.io>`_.
|
||||
`Matrix <https://github.com/matrix-org>`__ is the open standard for
|
||||
secure and interoperable real time communications. You can directly run
|
||||
secure and interoperable real-time communications. You can directly run
|
||||
and manage the source code in this repository, available under an AGPL
|
||||
license (or alternatively under a commercial license from Element).
|
||||
There is no support provided by Element unless you have a
|
||||
@@ -23,13 +23,13 @@ ESS builds on Synapse to offer a complete Matrix-based backend including the ful
|
||||
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
|
||||
giving admins the power to easily manage an organization-wide
|
||||
deployment. It includes advanced identity management, auditing,
|
||||
moderation and data retention options as well as Long Term Support and
|
||||
SLAs. ESS can be used to support any Matrix-based frontend client.
|
||||
moderation and data retention options as well as Long-Term Support and
|
||||
SLAs. ESS supports any Matrix-compatible client.
|
||||
|
||||
.. contents::
|
||||
|
||||
🛠️ Installing and configuration
|
||||
===============================
|
||||
🛠️ Installation and configuration
|
||||
==================================
|
||||
|
||||
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
||||
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
||||
@@ -133,7 +133,7 @@ connect from a client: see
|
||||
An easy way to get started is to login or register via Element at
|
||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||
You will need to change the server you are logging into from ``matrix.org``
|
||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||
and instead specify a homeserver URL of ``https://<server_name>:8448``
|
||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||
If you prefer to use another client, refer to our
|
||||
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
|
||||
@@ -162,16 +162,15 @@ the public internet. Without it, anyone can freely register accounts on your hom
|
||||
This can be exploited by attackers to create spambots targeting the rest of the Matrix
|
||||
federation.
|
||||
|
||||
Your new user name will be formed partly from the ``server_name``, and partly
|
||||
from a localpart you specify when you create the account. Your name will take
|
||||
the form of::
|
||||
Your new Matrix ID will be formed partly from the ``server_name``, and partly
|
||||
from a localpart you specify when you create the account in the form of::
|
||||
|
||||
@localpart:my.domain.name
|
||||
|
||||
(pronounced "at localpart on my dot domain dot name").
|
||||
|
||||
As when logging in, you will need to specify a "Custom server". Specify your
|
||||
desired ``localpart`` in the 'User name' box.
|
||||
desired ``localpart`` in the 'Username' box.
|
||||
|
||||
🎯 Troubleshooting and support
|
||||
==============================
|
||||
@@ -209,10 +208,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
|
||||
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
|
||||
before creating that mapping.
|
||||
|
||||
**They are not where accounts or credentials are stored - these live on home
|
||||
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
|
||||
**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
|
||||
Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**
|
||||
|
||||
This process is very security-sensitive, as there is obvious risk of spam if it
|
||||
This process is highly security-sensitive, as there is an obvious risk of spam if it
|
||||
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
|
||||
term, we hope to create a decentralised system to manage it (`matrix-doc #712
|
||||
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
|
||||
@@ -238,9 +237,9 @@ email address.
|
||||
We welcome contributions to Synapse from the community!
|
||||
The best place to get started is our
|
||||
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||
This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
|
||||
|
||||
This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
|
||||
information for Synapse developers as well as Synapse administrators.
|
||||
|
||||
Developers might be particularly interested in:
|
||||
|
||||
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
|
||||
|
||||
## build_rust.py (file name inferred from the `def build(setup_kwargs: ...)` content)

```diff
@@ -19,17 +19,17 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
         # This flag is a no-op in the latest versions. Instead, we need to
         # specify this in the `bdist_wheel` config below.
         py_limited_api=True,
-        # We force always building in release mode, as we can't tell the
-        # difference between using `poetry` in development vs production.
+        # We always build in release mode, as we can't distinguish
+        # between using `poetry` in development vs production.
         debug=False,
     )
     setup_kwargs.setdefault("rust_extensions", []).append(extension)
     setup_kwargs["zip_safe"] = False

-    # We lookup the minimum supported python version by looking at
-    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
+    # We look up the minimum supported Python version with
+    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
     # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp39 for python 3.9.
+    # e.g. cp39 for Python 3.9.
     py_limited_api: str
     python_bounds = SpecifierSet(setup_kwargs["python_requires"])
     for minor_version in itertools.count(start=8):
```
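The version-probing loop at the end of this hunk is terse; here is a self-contained sketch of the same idea (using the real `packaging` library; the bounds string is an example, not copied from Synapse's metadata):

```python
# Compute the abi3 / py_limited_api tag: find the lowest CPython 3.x minor
# version allowed by `python_requires` and render it as e.g. "cp39".
import itertools
from packaging.specifiers import SpecifierSet

python_bounds = SpecifierSet(">=3.9.0,<4.0.0")  # example bounds
for minor_version in itertools.count(start=8):
    if f"3.{minor_version}.0" in python_bounds:
        py_limited_api = f"cp3{minor_version}"
        break

print(py_limited_api)  # -> cp39
```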
## changelog.d (new and removed entries)

New files (each `@@ -0,0 +1 @@`):

- `changelog.d/18474.misc`: Add debug logging for HMAC digest verification failures when using the admin API to register users.
- `changelog.d/18514.feature`: Add configurable rate limiting for the creation of rooms.
- `changelog.d/18540.feature`: Add support for [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban.
- `changelog.d/18574.misc`: Speed up upgrading a room with large numbers of banned users.
- `changelog.d/18580.misc`: Fix config documentation generation script on Windows by enforcing UTF-8.
- `changelog.d/18585.feature`: When admins enable themselves to see soft-failed events, they will also see if the cause is due to the policy server flagging them as spam via `unsigned`.
- `changelog.d/18656.misc`: Refactor `Counter` metrics to be homeserver-scoped.
- `changelog.d/18670.misc`: Refactor background process metrics to be homeserver-scoped.
- `changelog.d/18686.feature`: Add ability to configure forward/outbound proxy via homeserver config instead of environment variables. See `http_proxy`, `https_proxy`, `no_proxy_hosts`.
- `changelog.d/18696.bugfix`: Allow return code 403 (allowed by C2S Spec since v1.2) when fetching profiles via federation.
- `changelog.d/18700.doc`: Minor improvements to README.
- `changelog.d/18714.misc`: Refactor `LaterGauge` metrics to be homeserver-scoped.
- `changelog.d/18715.misc`: Refactor `GaugeBucketCollector` metrics to be homeserver-scoped.
- `changelog.d/18718.misc`: Reduce database usage in Sliding Sync by not querying for background update completion after the update is known to be complete.
- `changelog.d/18722.feature`: Advertise experimental support for [MSC4306](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) through `/_matrix/clients/versions` if enabled.
- `changelog.d/18724.misc`: Refactor `Histogram` metrics to be homeserver-scoped.
- `changelog.d/18725.misc`: Refactor `Gauge` metrics to be homeserver-scoped.
- `changelog.d/18726.bugfix`: Register the MSC4306 endpoints in the CS API when the experimental feature is enabled.
- `changelog.d/18727.misc`: Bump minimum version bound on Twisted to 21.2.0.
- `changelog.d/18728.misc`: Use `twisted.internet.testing` module in tests instead of deprecated `twisted.test.proto_helpers`.
- `changelog.d/18729.misc`: Bump minimum version bound on Twisted to 21.2.0.
- `changelog.d/18730.misc`: Remove obsolete `/send_event` replication endpoint.
- `changelog.d/18736.misc`: Work around `twisted.protocols.amp.TooLong` error by reducing logging in some tests.
- `changelog.d/18737.removal`: Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`. See the relevant section in the upgrade notes for more information.
- `changelog.d/18750.bugfix`: Fix a long-standing bug where suspended users could not have server notices sent to them (a 403 was returned to the admin).
- `changelog.d/18753.misc`: Refactor `Histogram` metrics to be homeserver-scoped.

Removed entries (each `@@ -1 +0,0 @@`; the file names are not shown in this view, but the contents match entries now graduated into CHANGES.md above):

- Support for [MSC4235](https://github.com/matrix-org/matrix-spec-proposals/pull/4235): via query param for hierarchy endpoint. Contributed by Krishan (@kfiven).
- Add `forget_forced_upon_leave` capability as per [MSC4267](https://github.com/matrix-org/matrix-spec-proposals/pull/4267).
- Add `federated_user_may_invite` spam checker callback which receives the entire invite event. Contributed by @tulir @ Beeper.
- Stop adding the "origin" field to newly-created events (PDUs).
- Fix `KeyError` on background updates when using split main/state databases.
- Fix documentation of the Delete Room Admin API's status field.
- Replace `PyICU` crate with equivalent `icu_segmenter` Rust crate.
- Improve docstring on `simple_upsert_many`.
- Raise poetry-core version cap to 2.1.3.
- Raise setuptools_rust version cap to 1.11.1.
- Improve performance of device deletion by adding missing index.
- Fix `avatar_url` and `displayname` being sent on federation profile queries when they are not set.
- Respond with 401 & `M_USER_LOCKED` when a locked user calls `POST /login`, as per the spec.
- Better handling of ratelimited requests.
- Update to Rust 1.87.0 in CI, and bump the pinned commit of the `dtolnay/rust-toolchain` GitHub Action to `b3b07ba8b418998c39fb20f53e8b695cdcc8de1b`.
- Better handling of ratelimited requests.
- Speed up bulk device deletion.
- Ensure policy servers are not asked to scan policy server change events, allowing rooms to disable the use of a policy server while the policy server is down.
- Log the room ID we're purging state for.
- Replace `PyICU` crate with equivalent `icu_segmenter` Rust crate.
## contrib/grafana/synapse.json (file name inferred from the panel JSON content)

```diff
@@ -4396,7 +4396,7 @@
           "exemplar": false,
           "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
           "instant": false,
-          "legendFormat": "{{server_name}} ",
+          "legendFormat": "{{origin_server_name}} ",
           "range": true,
           "refId": "A"
         }
@@ -4518,7 +4518,7 @@
           "exemplar": false,
           "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
           "instant": false,
-          "legendFormat": "{{server_name}}",
+          "legendFormat": "{{destination_server_name}}",
           "range": true,
           "refId": "A"
         }
```
## debian/changelog (vendored, 24 lines changed)
```diff
@@ -1,3 +1,27 @@
+matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.135.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jul 2025 12:19:14 +0100
+
+matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.135.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Jul 2025 12:08:37 +0100
+
+matrix-synapse-py3 (1.134.0) stable; urgency=medium
+
+  * New Synapse release 1.134.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jul 2025 14:22:50 +0100
+
+matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.134.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Jul 2025 11:27:13 +0100
+
 matrix-synapse-py3 (1.133.0) stable; urgency=medium

   * New synapse release 1.133.0.
```
## docker/complement/conf/start_for_complement.sh (file name inferred from the `SYNAPSE_COMPLEMENT_USE_WORKERS` content)

```diff
@@ -54,7 +54,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
   export SYNAPSE_WORKER_TYPES="\
       event_persister:2, \
       background_worker, \
-      frontend_proxy, \
       event_creator, \
       user_dir, \
       media_repository, \
@@ -65,6 +64,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
       client_reader, \
       appservice, \
       pusher, \
+      device_lists:2, \
       stream_writers=account_data+presence+receipts+to_device+typing"

 fi
```
## docker/complement/conf/workers-shared-extra.yaml.j2 (file name inferred from the ratelimit config content)

```diff
@@ -98,6 +98,10 @@ rc_delayed_event_mgmt:
   per_second: 9999
   burst_count: 9999

+rc_room_creation:
+  per_second: 9999
+  burst_count: 9999
+
 federation_rr_transactions_per_room_per_second: 9999

 allow_device_name_lookup_over_federation: true
```
@@ -178,6 +178,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
|
||||
"^/_matrix/client/(r0|v3)/delete_devices$",
|
||||
"^/_matrix/client/versions$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
|
||||
"^/_matrix/client/(r0|v3|unstable)/register$",
|
||||
@@ -194,6 +197,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
|
||||
"^/_matrix/client/(r0|v3|unstable)/capabilities$",
|
||||
"^/_matrix/client/(r0|v3|unstable)/notifications$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
|
||||
],
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
@@ -265,13 +271,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
"frontend_proxy": {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["client", "replication"],
|
||||
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
"account_data": {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["client", "replication"],
|
||||
@@ -306,6 +305,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
"device_lists": {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["client", "replication"],
|
||||
"endpoint_patterns": [],
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
"typing": {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["client", "replication"],
|
||||
@@ -322,6 +328,15 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
"thread_subscriptions": {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["client", "replication"],
|
||||
"endpoint_patterns": [
|
||||
"^/_matrix/client/unstable/io.element.msc4306/.*",
|
||||
],
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
},
|
||||
}

# Templates for sections that may be inserted multiple times in config files

@@ -412,16 +427,18 @@ def add_worker_roles_to_shared_config(
    # streams
    instance_map = shared_config.setdefault("instance_map", {})

    # This is a list of the stream_writers that there can be only one of. Events can be
    # sharded, and therefore doesn't belong here.
    singular_stream_writers = [
    # This is a list of the stream_writers.
    stream_writers = {
        "account_data",
        "events",
        "device_lists",
        "presence",
        "receipts",
        "to_device",
        "typing",
        "push_rules",
    ]
        "thread_subscriptions",
    }

    # Worker-type specific sharding config. Now a single worker can fulfill multiple
    # roles, check each.
@@ -431,28 +448,11 @@ def add_worker_roles_to_shared_config(
    if "federation_sender" in worker_types_set:
        shared_config.setdefault("federation_sender_instances", []).append(worker_name)

    if "event_persister" in worker_types_set:
        # Event persisters write to the events stream, so we need to update
        # the list of event stream writers
        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
            worker_name
        )

        # Map of stream writer instance names to host/ports combos
        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
            instance_map[worker_name] = {
                "path": f"/run/worker.{worker_port}",
            }
        else:
            instance_map[worker_name] = {
                "host": "localhost",
                "port": worker_port,
            }
    # Update the list of stream writers. It's convenient that the name of the worker
    # type is the same as the stream to write. Iterate over the whole list in case there
    # is more than one.
    for worker in worker_types_set:
        if worker in singular_stream_writers:
        if worker in stream_writers:
            shared_config.setdefault("stream_writers", {}).setdefault(
                worker, []
            ).append(worker_name)
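To make the effect of the rewritten bookkeeping concrete, here is a hedged, self-contained sketch of the same `setdefault` pattern applied to a couple of made-up workers (names and ports are invented; the real function also handles unix sockets, federation senders, and other roles). Note that, per the `generate_worker_files` hunk below, an `event_persister` worker has `events` added to its type set before this mapping runs.

```python
from typing import Dict, Set

# Illustration only: made-up worker names/ports, simplified shared config.
shared_config: Dict = {}
stream_writers = {
    "account_data", "events", "device_lists", "presence", "receipts",
    "to_device", "typing", "push_rules", "thread_subscriptions",
}

def register(worker_name: str, worker_types_set: Set[str], worker_port: int) -> None:
    # Every worker gets an instance_map entry so others can reach it.
    instance_map = shared_config.setdefault("instance_map", {})
    instance_map[worker_name] = {"host": "localhost", "port": worker_port}
    # A worker type whose name matches a stream becomes a writer for it.
    for worker in worker_types_set:
        if worker in stream_writers:
            shared_config.setdefault("stream_writers", {}).setdefault(
                worker, []
            ).append(worker_name)

# An event_persister has "events" added to its type set (see the
# generate_worker_files hunk below), so it lands in stream_writers["events"].
register("worker1", {"event_persister", "events"}, 8034)
register("worker2", {"typing"}, 8035)
assert shared_config["stream_writers"] == {"events": ["worker1"], "typing": ["worker2"]}
```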

@@ -876,6 +876,13 @@ def generate_worker_files(
    else:
        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

    # Special case for event_persister: those are just workers that write to
    # the `events` stream. For other workers, the worker name is the same
    # name of the stream they write to, but for some reason it is not the
    # case for event_persister.
    if "event_persister" in worker_types_set:
        worker_types_set.add("events")

    # Update the shared config with sharding-related options if necessary
    add_worker_roles_to_shared_config(
        shared_config, worker_types_set, worker_name, worker_port

@@ -74,6 +74,7 @@
- [Users](admin_api/user_admin_api.md)
- [Server Version](admin_api/version_api.md)
- [Federation](usage/administration/admin_api/federation.md)
- [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
- [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)

67
docs/admin_api/client_server_api_extensions.md
Normal file
@@ -0,0 +1,67 @@
# Client-Server API Extensions

Server administrators can set special account data to change how the Client-Server API behaves for
their clients. Setting the account data, or having it already set, as a non-admin has no effect.

All configuration options can be set through the `io.element.synapse.admin_client_config` global
account data on the admin's user account.

Example:

```
PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
{
  "return_soft_failed_events": true
}
```

## See soft failed events

Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).

To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.

**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
in the event's `unsigned` content like so:

```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```

Default: `false`

## See events marked spammy by policy servers

Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).

Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
to `true`.

`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
policy server-flagged events. When `return_soft_failed_events` is `true`, however, `return_policy_server_spammy_events`
is always `true`.

Events which were flagged by the policy server will be flagged as `io.element.synapse.policy_server_spammy` in the
event's `unsigned` content, like so:

```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```

Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
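As a concrete example, the account data above can be set with any HTTP client. A minimal sketch using Python's `requests` (the homeserver URL, user ID, and access token are placeholders):

```python
import requests

# Hypothetical values: substitute your homeserver URL, the admin's user ID,
# and a valid access token for that admin account.
HOMESERVER = "https://matrix.example.com"
ADMIN_USER_ID = "@admin:example.com"
ACCESS_TOKEN = "syt_..."

resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{ADMIN_USER_ID}"
    "/account_data/io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={
        "return_soft_failed_events": True,
        # Implied `true` by the rule above; shown explicitly for clarity.
        "return_policy_server_spammy_events": True,
    },
)
resp.raise_for_status()
```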

@@ -1227,7 +1227,7 @@ See also the

## Controlling whether a user is shadow-banned

Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
@@ -1464,8 +1464,11 @@ _Added in Synapse 1.72.0._

## Redact all the events of a user

This endpoint allows an admin to redact the events of a given user. There are no restrictions on
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
admin user is not an admin/does not have the specified power level to issue redactions. An option
is provided to override the default and allow the admin to issue the redactions in all cases.

The API is
```
@@ -1475,7 +1478,7 @@ POST /_synapse/admin/v1/user/$user_id/redact
  "rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted;
otherwise all the events in the rooms provided in the request will be redacted.

The API starts the redaction process running, and returns immediately with a JSON body with
@@ -1501,7 +1504,10 @@ The following JSON body parameter must be provided:

The following JSON body parameters are optional:

- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room; defaults to 1000 if not provided.
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
  when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
  where the admin does not have sufficient power level to issue the redactions.

_Added in Synapse 1.116.0._
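A hedged sketch of calling this endpoint with Python's `requests`, using the body parameters documented above (the homeserver URL and token are placeholders):

```python
import requests

# Hypothetical values; the endpoint and body fields come from the docs above.
HOMESERVER = "https://matrix.example.com"
ADMIN_TOKEN = "syt_..."
user_id = "@spammer:example.com"

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/user/{user_id}/redact",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={
        "rooms": [],          # empty list = redact in every room the user is in
        "reason": "spam",
        "limit": 1000,
        "use_admin": True,    # issue redactions as the admin, not by puppeting
    },
)
resp.raise_for_status()
# The response identifies the background redaction job, which can then be
# polled via the status endpoint described in the full documentation.
print(resp.json())
```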

@@ -7,8 +7,23 @@ proxy is supported, not SOCKS proxy or anything else.

## Configure

The proxy settings can be configured in the homeserver configuration file via
[`http_proxy`](../usage/configuration/config_documentation.md#http_proxy),
[`https_proxy`](../usage/configuration/config_documentation.md#https_proxy), and
[`no_proxy_hosts`](../usage/configuration/config_documentation.md#no_proxy_hosts).

`homeserver.yaml` example:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```

The proxy settings can also be configured via the `http_proxy`, `https_proxy`, and
`no_proxy` environment variables. The environment variable names are not case sensitive.
- `http_proxy`: Proxy server to use for HTTP requests.
- `https_proxy`: Proxy server to use for HTTPS requests.
- `no_proxy`: Comma-separated list of hosts, IP addresses, or IP ranges in CIDR
@@ -44,7 +59,7 @@ The proxy will be **used** for:

- phone-home stats
- recaptcha validation
- CAS auth validation
- OpenID Connect (OIDC)
- Outbound federation
- Federation (checking public key revocation)
- Fetching public keys of other servers
@@ -53,7 +68,7 @@ The proxy will be **used** for:

It will **not be used** for:

- Application Services
- Matrix Identity servers
- In worker configurations
  - connections between workers
  - connections from workers to Redis

@@ -117,6 +117,63 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.136.0

## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`

The `run_as_background_process` function is now a method of the `ModuleApi` class. If
you were using the function directly from the module API, it will continue to work fine,
but the background process metrics will not include an accurate `server_name` label.
This kind of metric labeling isn't relevant for many use cases and is used to
differentiate Synapse instances running in the same Python process (relevant to Synapse
Pro: Small Hosts). We recommend updating your usage to the new
`ModuleApi.run_as_background_process` method to stay on top of future changes.

<details>
<summary>Example <code>run_as_background_process</code> upgrade</summary>

Before:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        run_as_background_process(__name__ + ":setup_database", self.setup_database)
```

After:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        module_api.run_as_background_process(__name__ + ":setup_database", self.setup_database)
```

</details>

## Metric labels have changed on `synapse_federation_last_received_pdu_time` and `synapse_federation_last_sent_pdu_time`

Previously, the `synapse_federation_last_received_pdu_time` and
`synapse_federation_last_sent_pdu_time` metrics both used the `server_name` label to
differentiate between the different servers that we send events to and receive events from.

Since we're now using the `server_name` label to differentiate between different Synapse
homeserver instances running in the same process, these metrics have been changed as follows:

- `synapse_federation_last_received_pdu_time` now uses the `origin_server_name` label
- `synapse_federation_last_sent_pdu_time` now uses the `destination_server_name` label

The Grafana dashboard JSON in `contrib/grafana/synapse.json` has been updated to reflect
this change, but you will need to manually update your own existing Grafana dashboards
using these metrics.
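For illustration only, the sketch below re-creates the two gauges with `prometheus_client` to show the new label names; it is not Synapse's actual metric-definition code.

```python
from prometheus_client import Gauge

# Hypothetical re-creations showing the renamed labels described above;
# Synapse's real definitions live in its federation code.
last_received_pdu = Gauge(
    "synapse_federation_last_received_pdu_time",
    "Timestamp of the last PDU received over federation",
    ["server_name", "origin_server_name"],
)
last_sent_pdu = Gauge(
    "synapse_federation_last_sent_pdu_time",
    "Timestamp of the last PDU sent over federation",
    ["server_name", "destination_server_name"],
)

# `server_name` now identifies the local Synapse instance; the second label
# identifies the remote server.
last_received_pdu.labels(
    server_name="example.com", origin_server_name="other.example.org"
).set_to_current_time()
```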

# Upgrading to v1.135.0

## `on_user_registration` module API callback may now run on any worker

Previously, the `on_user_registration` callback would only run on the main
process. Modules relying on this callback must now assume that it may be
called from any worker, not just the main process.

# Upgrading to v1.134.0

## ICU bundled with Synapse
@@ -610,6 +610,39 @@ manhole_settings:
  ssh_pub_key_path: CONFDIR/id_rsa.pub
```
---
### `http_proxy`

*(string|null)* Proxy server to use for HTTP requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
```
---
### `https_proxy`

*(string|null)* Proxy server to use for HTTPS requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
```
---
### `no_proxy_hosts`

*(array)* List of hosts, IP addresses, or IP ranges in CIDR format which should not use the proxy. Synapse will connect to these hosts directly.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```
---
### `dummy_events_threshold`

*(integer)* Forward extremities can build up in a room due to networking delays between homeservers. Once this happens in a large room, calculation of the state of that room can become quite expensive. To mitigate this, once the number of forward extremities reaches a given threshold, Synapse will send an `org.matrix.dummy_event` event, which will reduce the forward extremities in the room.
@@ -1925,9 +1958,8 @@ This setting has the following sub-options:

Default configuration:
```yaml
rc_delayed_event_mgmt:
  per_second: 1.0
  burst_count: 5.0
```

Example configuration:
@@ -1964,6 +1996,31 @@ rc_reports:
  burst_count: 20.0
```
---
### `rc_room_creation`

*(object)* Sets rate limits for how often users are able to create rooms.

This setting has the following sub-options:

* `per_second` (number): Maximum number of requests a client can send per second.

* `burst_count` (number): Maximum number of requests a client can send before being throttled.

Default configuration:
```yaml
rc_room_creation:
  per_user:
    per_second: 0.016
    burst_count: 10.0
```

Example configuration:
```yaml
rc_room_creation:
  per_second: 1.0
  burst_count: 5.0
```
---
### `federation_rr_transactions_per_room_per_second`

*(integer)* Sets the outgoing federation transaction frequency for sending read receipts, per room.
@@ -2086,6 +2143,23 @@ Example configuration:
max_upload_size: 60M
```
---
### `media_upload_limits`

*(array)* A list of media upload limits defining how much data a given user can upload in a given time period.

An empty list means no limits are applied.

Defaults to `[]`.

Example configuration:
```yaml
media_upload_limits:
  - time_period: 1h
    max_size: 100M
  - time_period: 1w
    max_size: 500M
```
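Conceptually, each entry behaves like a rolling-window byte budget per user. The toy sketch below illustrates the idea in Python; it is not Synapse's implementation, and it takes the window in seconds and the cap in bytes rather than the `1h`/`100M` shorthand above.

```python
import time
from dataclasses import dataclass, field
from typing import List, Optional, Tuple

@dataclass
class UploadLimiter:
    """Toy rolling-window upload budget; illustrative only."""
    time_period_s: float      # e.g. 3600 for "1h"
    max_size_bytes: int       # e.g. 100 * 1024 * 1024 for "100M"
    _uploads: List[Tuple[float, int]] = field(default_factory=list)

    def try_upload(self, size_bytes: int, now: Optional[float] = None) -> bool:
        now = time.time() if now is None else now
        cutoff = now - self.time_period_s
        # Drop uploads that have aged out of the window.
        self._uploads = [(t, s) for t, s in self._uploads if t >= cutoff]
        if sum(s for _, s in self._uploads) + size_bytes > self.max_size_bytes:
            return False
        self._uploads.append((now, size_bytes))
        return True

limiter = UploadLimiter(time_period_s=3600, max_size_bytes=100 * 1024 * 1024)
assert limiter.try_upload(60 * 1024 * 1024)
assert not limiter.try_upload(60 * 1024 * 1024)  # would exceed 100M within 1h
```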
---
### `max_image_pixels`

*(byte size)* Maximum number of pixels that will be thumbnailed. Defaults to `"32M"`.
@@ -2340,6 +2414,21 @@ Example configuration:
recaptcha_public_key: YOUR_PUBLIC_KEY
```
---
### `recaptcha_public_key_path`

*(string|null)* An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows the public key to be specified in an external file.

The file should be a plain text file, containing only the public key. Synapse reads the public key from the given file once at startup.

_Added in Synapse 1.135.0._

Defaults to `null`.

Example configuration:
```yaml
recaptcha_public_key_path: /path/to/key/file
```
---
### `recaptcha_private_key`

*(string|null)* This homeserver's ReCAPTCHA private key. Must be specified if [`enable_registration_captcha`](#enable_registration_captcha) is enabled. Defaults to `null`.
@@ -2349,6 +2438,21 @@ Example configuration:
recaptcha_private_key: YOUR_PRIVATE_KEY
```
---
### `recaptcha_private_key_path`

*(string|null)* An alternative to [`recaptcha_private_key`](#recaptcha_private_key): allows the private key to be specified in an external file.

The file should be a plain text file, containing only the private key. Synapse reads the private key from the given file once at startup.

_Added in Synapse 1.135.0._

Defaults to `null`.

Example configuration:
```yaml
recaptcha_private_key_path: /path/to/key/file
```
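Both `*_path` options follow the same pattern: the file contains only the key and is read once at startup. A minimal sketch of that pattern (the path is a placeholder, and stripping surrounding whitespace is an assumption here, not documented behaviour):

```python
from pathlib import Path

def read_key(path: str) -> str:
    # Assumption: tolerate a trailing newline by stripping whitespace.
    return Path(path).read_text().strip()

recaptcha_public_key = read_key("/path/to/key/file")
```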
---
### `enable_registration_captcha`

*(boolean)* Set to `true` to require users to complete a CAPTCHA test when registering an account. Requires a valid ReCaptcha public/private key.
@@ -3761,7 +3865,11 @@ encryption_enabled_by_default_for_room_type: invite

This setting has the following sub-options:

* `enabled` (boolean): Defines whether users can search the user directory. If `false` then empty responses are returned to all queries.

  *Warning: While the homeserver may determine which subset of users are searched, the Matrix specification requires homeservers to include (at minimum) users visible in public rooms and users sharing a room with the requester. Using `false` improves performance but violates this requirement.*

  Defaults to `true`.

* `search_all_users` (boolean): Defines whether to search all users visible to your homeserver at the time the search is performed. If set to `true`, will return all users known to the homeserver matching the search query. If `false`, search results will only contain users visible in public rooms and users sharing a room with the requester.

@@ -4291,6 +4399,8 @@ This setting has the following sub-options:

* `push_rules` (string): Name of a worker assigned to the `push_rules` stream.

* `device_lists` (string): Name of a worker assigned to the `device_lists` stream.

Example configuration:
```yaml
stream_writers:

@@ -238,7 +238,9 @@ information.
    ^/_matrix/client/unstable/im.nheko.summary/summary/.*$
    ^/_matrix/client/(r0|v3|unstable)/account/3pid$
    ^/_matrix/client/(r0|v3|unstable)/account/whoami$
    ^/_matrix/client/(r0|v3|unstable)/account/deactivate$
    ^/_matrix/client/(r0|v3)/delete_devices$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
    ^/_matrix/client/versions$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/
@@ -257,7 +259,9 @@ information.
    ^/_matrix/client/(r0|v3|unstable)/keys/changes$
    ^/_matrix/client/(r0|v3|unstable)/keys/claim$
    ^/_matrix/client/(r0|v3|unstable)/room_keys/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$

    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -282,7 +286,6 @@ Additionally, the following REST endpoints can be handled for GET requests:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
    ^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices/

    # Account data requests
    ^/_matrix/client/(r0|v3|unstable)/.*/tags
@@ -329,7 +332,6 @@ set to `true`), the following endpoints can be handled by the worker:

    ^/_synapse/admin/v2/users/[^/]+$
    ^/_synapse/admin/v1/username_available$
    ^/_synapse/admin/v1/users/[^/]+/_allow_cross_signing_replacement_without_uia$
    # Only the GET method:
    ^/_synapse/admin/v1/users/[^/]+/devices$

Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
@@ -550,6 +552,18 @@ the stream writer for the `push_rules` stream:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/

##### The `device_lists` stream

The `device_lists` stream supports multiple writers. The following endpoints
can be handled by any worker, but should be routed directly to one of the workers
configured as a stream writer for the `device_lists` stream:

    ^/_matrix/client/(r0|v3)/delete_devices$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$
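Since routing is regex-based, a reverse proxy (or a quick test script) only needs to match request paths against the patterns above. A small hypothetical check in Python, using the corrected patterns:

```python
import re

# The device_lists endpoint patterns from the list above; the routing helper
# itself is a made-up illustration, not part of Synapse.
DEVICE_LISTS_PATTERNS = [
    r"^/_matrix/client/(r0|v3)/delete_devices$",
    r"^/_matrix/client/(api/v1|r0|v3|unstable)/devices/",
    r"^/_matrix/client/(r0|v3|unstable)/keys/upload",
    r"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
    r"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
]

def routes_to_device_lists_writer(path: str) -> bool:
    return any(re.match(p, path) for p in DEVICE_LISTS_PATTERNS)

assert routes_to_device_lists_writer("/_matrix/client/v3/keys/upload")
assert not routes_to_device_lists_writer("/_matrix/client/v3/login")
```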

#### Restrict outbound federation traffic to a specific set of workers

The

463
poetry.lock
generated
@@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a

[[package]]
name = "authlib"
version = "1.6.0"
version = "1.6.1"
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
files = [
    {file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"},
    {file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"},
    {file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"},
    {file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"},
]

[package.dependencies]
@@ -366,21 +366,6 @@ files = [
    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]

[[package]]
name = "commonmark"
version = "0.9.1"
description = "Python parser for the CommonMark Markdown spec"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
    {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
    {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
]

[package.extras]
test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]

[[package]]
name = "constantly"
version = "15.1.0"
@@ -519,18 +504,19 @@ smmap = ">=3.0.1,<6"

[[package]]
name = "gitpython"
version = "3.1.44"
version = "3.1.45"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"},
    {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"},
    {file = "gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77"},
    {file = "gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c"},
]

[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.10\""}

[package.extras]
doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"]
@@ -951,14 +937,14 @@ i18n = ["Babel (>=2.7)"]

[[package]]
name = "jsonschema"
version = "4.24.0"
version = "4.25.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"},
    {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"},
    {file = "jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716"},
    {file = "jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f"},
]

[package.dependencies]
@@ -969,7 +955,7 @@ rpds-py = ">=0.7.1"

[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]

[[package]]
name = "jsonschema-specifications"
@@ -1029,145 +1015,107 @@ pyasn1 = ">=0.4.6"

[[package]]
name = "lxml"
version = "5.4.0"
version = "6.0.0"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = true
python-versions = ">=3.6"
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"url-preview\""
files = [
    ...
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"},
|
||||
{file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"},
|
||||
{file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"},
|
||||
{file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"},
|
||||
{file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -1175,7 +1123,6 @@ cssselect = ["cssselect (>=0.7)"]
html-clean = ["lxml_html_clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=3.0.11,<3.1.0)"]

[[package]]
name = "lxml-stubs"
@@ -1194,14 +1141,14 @@ test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytes

[[package]]
name = "markdown-it-py"
version = "2.2.0"
version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
{file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]

[package.dependencies]
@@ -1214,7 +1161,7 @@ compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]

[[package]]
@@ -1440,50 +1387,51 @@ docs = ["sphinx (>=8,<9)", "sphinx-autobuild"]

[[package]]
name = "mypy"
version = "1.13.0"
version = "1.16.1"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
|
||||
{file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
|
||||
{file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
|
||||
{file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
|
||||
{file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
|
||||
{file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
|
||||
{file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
|
||||
{file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
|
||||
{file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
|
||||
{file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
|
||||
{file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
|
||||
{file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
|
||||
{file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
|
||||
{file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
|
||||
{file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
|
||||
{file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
|
||||
{file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
|
||||
{file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
|
||||
{file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
|
||||
{file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
|
||||
{file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
|
||||
{file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
|
||||
{file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
|
||||
{file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
|
||||
{file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
|
||||
{file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
|
||||
{file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
|
||||
{file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
|
||||
{file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
|
||||
{file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
|
||||
{file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
|
||||
{file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"},
|
||||
{file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"},
|
||||
{file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"},
|
||||
{file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
mypy-extensions = ">=1.0.0"
mypy_extensions = ">=1.0.0"
pathspec = ">=0.9.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.6.0"
typing_extensions = ">=4.6.0"

[package.extras]
dmypy = ["psutil (>=4.0)"]
@@ -1506,18 +1454,18 @@ files = [

[[package]]
name = "mypy-zope"
version = "1.0.12"
version = "1.0.13"
description = "Plugin for mypy to support zope interfaces"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "mypy_zope-1.0.12-py3-none-any.whl", hash = "sha256:f2ecf169f886fbc266e9339db0c2f3818528a7536b9bb4f5ece1d5854dc2f27c"},
{file = "mypy_zope-1.0.12.tar.gz", hash = "sha256:d6f8f99eb5644885553b4ec7afc8d68f5daf412c9bf238ec3c36b65d97df6cbe"},
{file = "mypy_zope-1.0.13-py3-none-any.whl", hash = "sha256:13740c4cbc910cca2c143c6709e1c483c991abeeeb7b629ad6f73d8ac1edad15"},
{file = "mypy_zope-1.0.13.tar.gz", hash = "sha256:63fb4d035ea874baf280dc69e714dcde4bd2a4a4837a0fd8d90ce91bea510f99"},
]

[package.dependencies]
mypy = ">=1.0.0,<1.17.0"
mypy = ">=1.0.0,<1.18.0"
"zope.interface" = "*"
"zope.schema" = "*"

@@ -1581,16 +1529,28 @@ files = [
[package.extras]
dev = ["jinja2"]

[[package]]
name = "pathspec"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]

[[package]]
name = "phonenumbers"
version = "9.0.8"
version = "9.0.10"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "phonenumbers-9.0.8-py2.py3-none-any.whl", hash = "sha256:53d357111c0ead0d6408ae443613b18d3a053431ca1ddf7e881457c0969afcf9"},
{file = "phonenumbers-9.0.8.tar.gz", hash = "sha256:16f03f2cf65b5eee99ed25827d810febcab92b5d76f977e425fcd2e4ca6d4865"},
{file = "phonenumbers-9.0.10-py2.py3-none-any.whl", hash = "sha256:13b12d269be1f2b363c9bc2868656a7e2e8b50f1a1cef629c75005da6c374c6b"},
{file = "phonenumbers-9.0.10.tar.gz", hash = "sha256:c2d15a6a9d0534b14a7764f51246ada99563e263f65b80b0251d1a760ac4a1ba"},
]

[[package]]
@@ -2322,19 +2282,20 @@ idna2008 = ["idna"]

[[package]]
name = "rich"
version = "13.3.2"
version = "14.0.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.7.0"
python-versions = ">=3.8.0"
groups = ["dev"]
files = [
{file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
{file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
{file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"},
{file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"},
]

[package.dependencies]
markdown-it-py = ">=2.2.0,<3.0.0"
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}

[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -2448,30 +2409,30 @@ files = [

[[package]]
name = "ruff"
version = "0.12.2"
version = "0.12.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be"},
|
||||
{file = "ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e"},
|
||||
{file = "ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9"},
|
||||
{file = "ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da"},
|
||||
{file = "ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce"},
|
||||
{file = "ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d"},
|
||||
{file = "ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04"},
|
||||
{file = "ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342"},
|
||||
{file = "ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a"},
|
||||
{file = "ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639"},
|
||||
{file = "ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12"},
|
||||
{file = "ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e"},
|
||||
{file = "ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303"},
|
||||
{file = "ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb"},
|
||||
{file = "ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e"},
|
||||
{file = "ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606"},
|
||||
{file = "ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8"},
|
||||
{file = "ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa"},
|
||||
{file = "ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5"},
|
||||
{file = "ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4"},
|
||||
{file = "ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77"},
|
||||
{file = "ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f"},
|
||||
{file = "ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69"},
|
||||
{file = "ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71"},
|
||||
]
|
||||
|
||||
[[package]]
@@ -2509,15 +2470,15 @@ doc = ["Sphinx", "sphinx-rtd-theme"]

[[package]]
name = "sentry-sdk"
version = "2.32.0"
version = "2.34.1"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"sentry\""
files = [
{file = "sentry_sdk-2.32.0-py2.py3-none-any.whl", hash = "sha256:6cf51521b099562d7ce3606da928c473643abe99b00ce4cb5626ea735f4ec345"},
{file = "sentry_sdk-2.32.0.tar.gz", hash = "sha256:9016c75d9316b0f6921ac14c8cd4fb938f26002430ac5be9945ab280f78bec6b"},
{file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"},
{file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"},
]

[package.dependencies]
@@ -2957,18 +2918,6 @@ files = [
[package.dependencies]
types-setuptools = "*"

[[package]]
name = "types-commonmark"
version = "0.9.2.20240106"
description = "Typing stubs for commonmark"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-commonmark-0.9.2.20240106.tar.gz", hash = "sha256:52a062b71766d6ab258fca2d8e19fb0853796e25ca9afa9d0f67a1e42c93479f"},
{file = "types_commonmark-0.9.2.20240106-py3-none-any.whl", hash = "sha256:606d9de1e3a96cab0b1c0b6cccf4df099116148d1d864d115fde2e27ad6877c3"},
]

[[package]]
name = "types-html5lib"
version = "1.1.11.20240228"
@@ -2983,14 +2932,14 @@ files = [

[[package]]
name = "types-jsonschema"
version = "4.24.0.20250528"
version = "4.25.0.20250720"
description = "Typing stubs for jsonschema"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_jsonschema-4.24.0.20250528-py3-none-any.whl", hash = "sha256:6a906b5ff73ac11c8d1e0b6c30a9693e1e4e1ab56c56c932b3a7e081b86d187b"},
{file = "types_jsonschema-4.24.0.20250528.tar.gz", hash = "sha256:7e28c64e0ae7980eeb158105b20663fc6a6b8f81d5f86ea6614aa0014417bd1e"},
{file = "types_jsonschema-4.25.0.20250720-py3-none-any.whl", hash = "sha256:7d7897c715310d8bf9ae27a2cedba78bbb09e4cad83ce06d2aa79b73a88941df"},
{file = "types_jsonschema-4.25.0.20250720.tar.gz", hash = "sha256:765a3b6144798fe3161fd8cbe570a756ed3e8c0e5adb7c09693eb49faad39dbd"},
]

[package.dependencies]
@@ -3034,14 +2983,14 @@ files = [

[[package]]
name = "types-psycopg2"
version = "2.9.21.20250516"
version = "2.9.21.20250718"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_psycopg2-2.9.21.20250516-py3-none-any.whl", hash = "sha256:2a9212d1e5e507017b31486ce8147634d06b85d652769d7a2d91d53cb4edbd41"},
{file = "types_psycopg2-2.9.21.20250516.tar.gz", hash = "sha256:6721018279175cce10b9582202e2a2b4a0da667857ccf82a97691bdb5ecd610f"},
{file = "types_psycopg2-2.9.21.20250718-py3-none-any.whl", hash = "sha256:bcf085d4293bda48f5943a46dadf0389b2f98f7e8007722f7e1c12ee0f541858"},
{file = "types_psycopg2-2.9.21.20250718.tar.gz", hash = "sha256:dc09a97272ef67e739e57b9f4740b761208f4514257e311c0b05c8c7a37d04b4"},
]

[[package]]
@@ -3101,14 +3050,14 @@ files = [

[[package]]
name = "typing-extensions"
version = "4.14.0"
version = "4.14.1"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"},
{file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"},
{file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
{file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
]

[[package]]
@@ -3404,4 +3353,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = "^3.9.0"
content-hash = "6871453202e2ffc3c9ee5761481e0ef4ad99f08d9b8f24c6d6a64c2683683845"
content-hash = "600a349d08dde732df251583094a121b5385eb43ae0c6ceff10dcf9749359446"

@@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.133.0"
version = "1.135.0rc2"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -178,8 +178,13 @@ signedjson = "^1.1.0"
service-identity = ">=18.1.0"
# Twisted 18.9 introduces some logger improvements that the structured
# logger utilises
Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
# Twisted 21.2.0 introduces contextvar support.
# We could likely bump this to 22.1 without making distro packagers'
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
# RHEL 9: 22.10)
Twisted = {extras = ["tls"], version = ">=21.2.0"}
treq = ">=21.5.0"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=5.3"
@@ -195,7 +200,9 @@ pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
prometheus-client = ">=0.4.0"
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
prometheus-client = ">=0.6.0"
# we use `order`, which arrived in attrs 19.2.0.
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
attrs = ">=19.2.0,!=21.1.0"
@@ -224,7 +231,7 @@ pydantic = ">=1.7.4, <3"
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"
@@ -317,7 +324,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.2"
ruff = "0.12.7"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"

@@ -326,7 +333,6 @@ lxml-stubs = ">=0.4.0"
mypy = "*"
mypy-zope = "*"
types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-netaddr = ">=0.8.0.6"
types-opentracing = ">=2.4.2"
@@ -349,7 +355,7 @@ idna = ">=2.5"
click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
commonmark = ">=0.9.1"
markdown-it-py = ">=3.0.0"
pygithub = ">=1.55"
# The following are executed as commands by the release script.
twine = "*"
@@ -375,13 +381,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython 3.6, 3.7 and 3.8: EOLed
# - PyPy 3.7 and 3.8: we only support Python 3.9+
# - CPython and PyPy 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
# c.f. https://github.com/matrix-org/synapse/pull/14259
skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
skip = "cp38* pp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"

@@ -23,7 +23,7 @@ name = "synapse.synapse_rust"

[dependencies]
anyhow = "1.0.63"
base64 = "0.21.7"
base64 = "0.22.1"
bytes = "1.6.0"
headers = "0.4.0"
http = "1.1.0"
@@ -52,6 +52,7 @@ reqwest = { version = "0.12.15", default-features = false, features = [
http-body-util = "0.1.3"
futures = "0.3.31"
tokio = { version = "1.44.2", features = ["rt", "rt-multi-thread"] }
once_cell = "1.18.0"

[features]
extension-module = ["pyo3/extension-module"]

@@ -54,6 +54,7 @@ enum EventInternalMetadataData {
RecheckRedaction(bool),
SoftFailed(bool),
ProactivelySend(bool),
PolicyServerSpammy(bool),
Redacted(bool),
TxnId(Box<str>),
TokenId(i64),
@@ -96,6 +97,13 @@ impl EventInternalMetadataData {
.to_owned()
.into_any(),
),
EventInternalMetadataData::PolicyServerSpammy(o) => (
pyo3::intern!(py, "policy_server_spammy"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::Redacted(o) => (
pyo3::intern!(py, "redacted"),
o.into_pyobject(py)
@@ -155,6 +163,11 @@ impl EventInternalMetadataData {
.extract()
.with_context(|| format!("'{key_str}' has invalid type"))?,
),
"policy_server_spammy" => EventInternalMetadataData::PolicyServerSpammy(
value
.extract()
.with_context(|| format!("'{key_str}' has invalid type"))?,
),
"redacted" => EventInternalMetadataData::Redacted(
value
.extract()
@@ -427,6 +440,17 @@ impl EventInternalMetadata {
set_property!(self, ProactivelySend, obj);
}

#[getter]
fn get_policy_server_spammy(&self) -> PyResult<bool> {
Ok(get_property_opt!(self, PolicyServerSpammy)
.copied()
.unwrap_or(false))
}
#[setter]
fn set_policy_server_spammy(&mut self, obj: bool) {
set_property!(self, PolicyServerSpammy, obj);
}

#[getter]
fn get_redacted(&self) -> PyResult<bool> {
let bool = get_property!(self, Redacted)?;

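The hunk above exposes the new `policy_server_spammy` flag to Python as a plain boolean property that reads back as `false` when unset. A minimal, hedged sketch of how Python-side code could use it (the helper functions and event object here are illustrative; only the property itself comes from the diff):

```python
# Hedged sketch: Synapse events carry an `internal_metadata` attribute backed
# by the Rust `EventInternalMetadata` class patched above. `event` is assumed
# to be such an event object.
def mark_spammy(event) -> None:
    # Setter added in this change; stored as PolicyServerSpammy(bool).
    event.internal_metadata.policy_server_spammy = True

def is_spammy(event) -> bool:
    # Getter added in this change; unset values default to False.
    return event.internal_metadata.policy_server_spammy
```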
@@ -12,58 +12,149 @@
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/

use std::{collections::HashMap, future::Future, panic::AssertUnwindSafe, sync::LazyLock};
use std::{collections::HashMap, future::Future};

use anyhow::Context;
use futures::{FutureExt, TryStreamExt};
use pyo3::{exceptions::PyException, prelude::*, types::PyString};
use futures::TryStreamExt;
use once_cell::sync::OnceCell;
use pyo3::{create_exception, exceptions::PyException, prelude::*};
use reqwest::RequestBuilder;
use tokio::runtime::Runtime;

use crate::errors::HttpResponseException;

/// The tokio runtime that we're using to run async Rust libs.
static RUNTIME: LazyLock<Runtime> = LazyLock::new(|| {
tokio::runtime::Builder::new_multi_thread()
.worker_threads(4)
.enable_all()
.build()
.unwrap()
});
create_exception!(
synapse.synapse_rust.http_client,
RustPanicError,
PyException,
"A panic which happened in a Rust future"
);

/// A reference to the `Deferred` python class.
static DEFERRED_CLASS: LazyLock<PyObject> = LazyLock::new(|| {
Python::with_gil(|py| {
py.import("twisted.internet.defer")
.expect("module 'twisted.internet.defer' should be importable")
.getattr("Deferred")
.expect("module 'twisted.internet.defer' should have a 'Deferred' class")
.unbind()
})
});
impl RustPanicError {
fn from_panic(panic_err: &(dyn std::any::Any + Send + 'static)) -> PyErr {
// Apparently this is how you extract the panic message from a panic
let panic_message = if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
str_slice
} else if let Some(string) = panic_err.downcast_ref::<String>() {
string
} else {
"unknown error"
};
Self::new_err(panic_message.to_owned())
}
}

/// A reference to the twisted `reactor`.
static TWISTED_REACTOR: LazyLock<Py<PyModule>> = LazyLock::new(|| {
Python::with_gil(|py| {
py.import("twisted.internet.reactor")
.expect("module 'twisted.internet.reactor' should be importable")
.unbind()
})
});
/// This is the name of the attribute where we store the runtime on the reactor
static TOKIO_RUNTIME_ATTR: &str = "__synapse_rust_tokio_runtime";

/// A Python wrapper around a Tokio runtime.
///
/// This allows us to 'store' the runtime on the reactor instance, starting it
/// when the reactor starts, and stopping it when the reactor shuts down.
#[pyclass]
struct PyTokioRuntime {
runtime: Option<Runtime>,
}

#[pymethods]
impl PyTokioRuntime {
fn start(&mut self) -> PyResult<()> {
// TODO: allow customization of the runtime like the number of threads
let runtime = tokio::runtime::Builder::new_multi_thread()
.worker_threads(4)
.enable_all()
.build()?;

self.runtime = Some(runtime);

Ok(())
}

fn shutdown(&mut self) -> PyResult<()> {
let runtime = self
.runtime
.take()
.context("Runtime was already shutdown")?;

// Dropping the runtime will shut it down
drop(runtime);

Ok(())
}
}

impl PyTokioRuntime {
/// Get the handle to the Tokio runtime, if it is running.
fn handle(&self) -> PyResult<&tokio::runtime::Handle> {
let handle = self
.runtime
.as_ref()
.context("Tokio runtime is not running")?
.handle();

Ok(handle)
}
}

/// Get a handle to the Tokio runtime stored on the reactor instance, or create
/// a new one.
fn runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
if !reactor.hasattr(TOKIO_RUNTIME_ATTR)? {
install_runtime(reactor)?;
}

get_runtime(reactor)
}

/// Install a new Tokio runtime on the reactor instance.
fn install_runtime(reactor: &Bound<PyAny>) -> PyResult<()> {
let py = reactor.py();
let runtime = PyTokioRuntime { runtime: None };
let runtime = runtime.into_pyobject(py)?;

// Attach the runtime to the reactor, starting it when the reactor is
// running, stopping it when the reactor is shutting down
reactor.call_method1("callWhenRunning", (runtime.getattr("start")?,))?;
reactor.call_method1(
"addSystemEventTrigger",
("after", "shutdown", runtime.getattr("shutdown")?),
)?;
reactor.setattr(TOKIO_RUNTIME_ATTR, runtime)?;

Ok(())
}

/// Get a reference to a Tokio runtime handle stored on the reactor instance.
fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
// This will raise if `TOKIO_RUNTIME_ATTR` is not set or if it is
// not a `Runtime`. Careful that this could happen if the user sets it
// manually, or if multiple versions of `pyo3-twisted` are used!
let runtime: Bound<PyTokioRuntime> = reactor.getattr(TOKIO_RUNTIME_ATTR)?.extract()?;
Ok(runtime.borrow())
}

/// A reference to the `twisted.internet.defer` module.
static DEFER: OnceCell<PyObject> = OnceCell::new();

/// Access to the `twisted.internet.defer` module.
fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
Ok(DEFER
.get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
.bind(py))
}

/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module: Bound<'_, PyModule> = PyModule::new(py, "http_client")?;
child_module.add_class::<HttpClient>()?;

// Make sure we fail early if we can't build the lazy statics.
LazyLock::force(&RUNTIME);
LazyLock::force(&DEFERRED_CLASS);
// Make sure we fail early if we can't load some modules
defer(py)?;

m.add_submodule(&child_module)?;

// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import acl` work.
// synapse.synapse_rust import http_client` work.
py.import("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.http_client", child_module)?;
@@ -72,26 +163,24 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
}

#[pyclass]
#[derive(Clone)]
struct HttpClient {
client: reqwest::Client,
reactor: PyObject,
}

#[pymethods]
impl HttpClient {
#[new]
pub fn py_new(user_agent: &str) -> PyResult<HttpClient> {
// The twisted reactor can only be imported after Synapse has been
// imported, to allow Synapse to change the twisted reactor. If we try
// and import the reactor too early twisted installs a default reactor,
// which can't be replaced.
LazyLock::force(&TWISTED_REACTOR);
pub fn py_new(reactor: Bound<PyAny>, user_agent: &str) -> PyResult<HttpClient> {
// Make sure the runtime gets installed
let _ = runtime(&reactor)?;

Ok(HttpClient {
client: reqwest::Client::builder()
.user_agent(user_agent)
.build()
.context("building reqwest client")?,
reactor: reactor.unbind(),
})
}

@@ -129,7 +218,7 @@ impl HttpClient {
builder: RequestBuilder,
response_limit: usize,
) -> PyResult<Bound<'a, PyAny>> {
create_deferred(py, async move {
create_deferred(py, self.reactor.bind(py), async move {
let response = builder.send().await.context("sending request")?;

let status = response.status();
@@ -159,43 +248,51 @@ impl HttpClient {
/// tokio runtime.
///
/// Does not handle deferred cancellation or contextvars.
fn create_deferred<F, O>(py: Python, fut: F) -> PyResult<Bound<'_, PyAny>>
fn create_deferred<'py, F, O>(
py: Python<'py>,
reactor: &Bound<'py, PyAny>,
fut: F,
) -> PyResult<Bound<'py, PyAny>>
where
F: Future<Output = PyResult<O>> + Send + 'static,
for<'a> O: IntoPyObject<'a>,
for<'a> O: IntoPyObject<'a> + Send + 'static,
{
let deferred = DEFERRED_CLASS.bind(py).call0()?;
let deferred = defer(py)?.call_method0("Deferred")?;
let deferred_callback = deferred.getattr("callback")?.unbind();
let deferred_errback = deferred.getattr("errback")?.unbind();

RUNTIME.spawn(async move {
// TODO: Is it safe to assert unwind safety here? I think so, as we
// don't use anything that could be tainted by the panic afterwards.
// Note that `.spawn(..)` asserts unwind safety on the future too.
let res = AssertUnwindSafe(fut).catch_unwind().await;
let rt = runtime(reactor)?;
let handle = rt.handle()?;
let task = handle.spawn(fut);

// Unbind the reactor so that we can pass it to the task
let reactor = reactor.clone().unbind();
handle.spawn(async move {
let res = task.await;

Python::with_gil(move |py| {
// Flatten the panic into standard python error
let res = match res {
Ok(r) => r,
Err(panic_err) => {
let panic_message = get_panic_message(&panic_err);
Err(PyException::new_err(
PyString::new(py, panic_message).unbind(),
))
}
Err(join_err) => match join_err.try_into_panic() {
Ok(panic_err) => Err(RustPanicError::from_panic(&panic_err)),
Err(err) => Err(PyException::new_err(format!("Task cancelled: {err}"))),
},
};

// Re-bind the reactor
let reactor = reactor.bind(py);

// Send the result to the deferred, via `.callback(..)` or `.errback(..)`
match res {
Ok(obj) => {
TWISTED_REACTOR
.call_method(py, "callFromThread", (deferred_callback, obj), None)
reactor
.call_method("callFromThread", (deferred_callback, obj), None)
.expect("callFromThread should not fail"); // There's nothing we can really do with errors here
}
Err(err) => {
TWISTED_REACTOR
.call_method(py, "callFromThread", (deferred_errback, err), None)
reactor
.call_method("callFromThread", (deferred_errback, err), None)
.expect("callFromThread should not fail"); // There's nothing we can really do with errors here
}
}
@@ -204,15 +301,3 @@ where

Ok(deferred)
}

/// Try and get the panic message out of the panic
fn get_panic_message<'a>(panic_err: &'a (dyn std::any::Any + Send + 'static)) -> &'a str {
// Apparently this is how you extract the panic message from a panic
if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
str_slice
} else if let Some(string) = panic_err.downcast_ref::<String>() {
string
} else {
"unknown error"
}
}

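With this change the Rust `HttpClient` no longer relies on a process-global Tokio runtime: it takes the Twisted reactor at construction time, lazily installs a per-reactor runtime that starts and stops with the reactor, and delivers results back through a `Deferred` fired via `callFromThread`. A hedged Python-side sketch of the new construction contract (the request method names are elided in the hunk above, so none are shown here; the user-agent string is illustrative):

```python
# Minimal sketch of the Python-side contract implied by the diff above.
# Assumes Synapse has already been imported so that importing the reactor
# does not install Twisted's default reactor prematurely.
from twisted.internet import reactor

from synapse.synapse_rust.http_client import HttpClient

# The constructor now takes the reactor first; a Tokio runtime is attached
# to the reactor (under the `__synapse_rust_tokio_runtime` attribute) on
# first use, started via `callWhenRunning` and torn down after shutdown.
client = HttpClient(reactor, "Synapse/1.135.0rc2")

# Request methods on `client` (not shown in the hunk) return Twisted
# Deferreds; a panic inside the Rust future now surfaces to callers as the
# new `RustPanicError` exception rather than a generic Exception.
```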
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.133/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.135/synapse-config.schema.json
type: object
properties:
modules:
@@ -629,6 +629,33 @@ properties:
password: mypassword
ssh_priv_key_path: CONFDIR/id_rsa
ssh_pub_key_path: CONFDIR/id_rsa.pub
http_proxy:
type: ["string", "null"]
description: >-
Proxy server to use for HTTP requests.

For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- "http://USERNAME:PASSWORD@10.0.1.1:8080/"
https_proxy:
type: ["string", "null"]
description: >-
Proxy server to use for HTTPS requests.

For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- "http://USERNAME:PASSWORD@proxy.example.com:8080/"
no_proxy_hosts:
type: array
description: >-
List of hosts, IP addresses, or IP ranges in CIDR format which should not use the
proxy. Synapse will directly connect to these hosts.

For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- - master.hostname.example.com
- 10.1.0.0/16
- 172.30.0.0/16
dummy_events_threshold:
type: integer
description: >-
@@ -2179,9 +2206,8 @@ properties:
with a short timeout, or restarting several different delayed events all
at once) without the risk of being ratelimited.
default:
per_user:
per_second: 1.0
burst_count: 5.0
per_second: 1.0
burst_count: 5.0
examples:
- per_second: 2.0
burst_count: 20.0
@@ -2202,6 +2228,17 @@ properties:
examples:
- per_second: 2.0
burst_count: 20.0
rc_room_creation:
$ref: "#/$defs/rc"
description: >-
Sets rate limits for how often users are able to create rooms.
default:
per_user:
per_second: 0.016
burst_count: 10.0
examples:
- per_second: 1.0
burst_count: 5.0
federation_rr_transactions_per_room_per_second:
type: integer
description: >-
@@ -2335,6 +2372,30 @@ properties:
default: 50M
examples:
- 60M
media_upload_limits:
type: array
description: >-
A list of media upload limits defining how much data a given user can
upload in a given time period.


An empty list means no limits are applied.
default: []
items:
time_period:
type: "#/$defs/duration"
description: >-
The time period over which the limit applies. Required.
max_size:
type: "#/$defs/bytes"
description: >-
Amount of data that can be uploaded in the time period by the user.
Required.
examples:
- - time_period: 1h
max_size: 100M
- time_period: 1w
max_size: 500M
max_image_pixels:
$ref: "#/$defs/bytes"
description: Maximum number of pixels that will be thumbnailed.
@@ -2668,6 +2729,21 @@ properties:
default: null
examples:
- YOUR_PUBLIC_KEY
recaptcha_public_key_path:
type: ["string", "null"]
description: >-
An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows
the public key to be specified in an external file.


The file should be a plain text file, containing only the public key.
Synapse reads the public key from the given file once at startup.


_Added in Synapse 1.135.0._
default: null
examples:
- /path/to/key/file
recaptcha_private_key:
type: ["string", "null"]
description: >-
@@ -2676,6 +2752,21 @@ properties:
default: null
examples:
- YOUR_PRIVATE_KEY
recaptcha_private_key_path:
type: ["string", "null"]
description: >-
An alternative to [`recaptcha_private_key`](#recaptcha_private_key):
allows the private key to be specified in an external file.


The file should be a plain text file, containing only the private key.
Synapse reads the private key from the given file once at startup.


_Added in Synapse 1.135.0._
default: null
examples:
- /path/to/key/file
enable_registration_captcha:
type: boolean
description: >-
@@ -4665,8 +4756,15 @@ properties:
enabled:
type: boolean
description: >-
Defines whether users can search the user directory. If false then
Defines whether users can search the user directory. If `false` then
empty responses are returned to all queries.


*Warning: While the homeserver may determine which subset of users are
searched, the Matrix specification requires homeservers to include (at
minimum) users visible in public rooms and users sharing a room with
the requester. Using `false` improves performance but violates this
requirement.*
default: true
search_all_users:
type: boolean
@@ -5323,6 +5421,9 @@ properties:
push_rules:
type: string
description: Name of a worker assigned to the `push_rules` stream.
device_lists:
type: string
description: Name of a worker assigned to the `device_lists` stream.
default: {}
examples:
- events: worker1

@@ -473,6 +473,10 @@ def section(prop: str, values: dict) -> str:
|
||||
|
||||
|
||||
def main() -> None:
|
||||
# For Windows: reconfigure the terminal to be UTF-8 for `print()` calls.
|
||||
if sys.platform == "win32":
|
||||
sys.stdout.reconfigure(encoding="utf-8")
|
||||
|
||||
def usage(err_msg: str) -> int:
|
||||
script_name = (sys.argv[:1] or ["__main__.py"])[0]
|
||||
print(err_msg, file=sys.stderr)
|
||||
@@ -485,7 +489,10 @@ def main() -> None:
|
||||
exit(usage("Too many arguments."))
|
||||
if not (filepath := (sys.argv[1:] or [""])[0]):
|
||||
exit(usage("No schema file provided."))
|
||||
with open(filepath) as f:
|
||||
with open(filepath, "r", encoding="utf-8") as f:
|
||||
# Note: Windows requires that we specify the encoding otherwise it uses
|
||||
# things like CP-1251, which can cause explosions.
|
||||
# See https://github.com/yaml/pyyaml/issues/123 for more info.
|
||||
return yaml.safe_load(f)
|
||||
|
||||
schema = read_json_file_arg()
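The hunk above pins both the terminal and file reads to UTF-8 on Windows. A minimal standalone sketch of the same pattern (the `load_schema` helper name is illustrative, not part of the script):

```python
import sys

import yaml

# Windows consoles and default file encodings can be a legacy code page
# (e.g. CP-1251), so pin stdout and file reads to UTF-8 explicitly.
if sys.platform == "win32":
    sys.stdout.reconfigure(encoding="utf-8")


def load_schema(path: str) -> object:
    with open(path, "r", encoding="utf-8") as f:
        return yaml.safe_load(f)
```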

@@ -28,8 +28,13 @@ from typing import Callable, Optional, Tuple, Type, Union
import mypy.types
from mypy.erasetype import remove_instance_last_known_values
from mypy.errorcodes import ErrorCode
from mypy.nodes import ARG_NAMED_OPT, TempNode, Var
from mypy.plugin import FunctionSigContext, MethodSigContext, Plugin
from mypy.nodes import ARG_NAMED_OPT, ListExpr, NameExpr, TempNode, TupleExpr, Var
from mypy.plugin import (
FunctionLike,
FunctionSigContext,
MethodSigContext,
Plugin,
)
from mypy.typeops import bind_self
from mypy.types import (
AnyType,
@@ -43,8 +48,28 @@ from mypy.types import (
UnionType,
)

PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL = ErrorCode(
"missing-server-name-label",
"`SERVER_NAME_LABEL` required in metric",
category="per-homeserver-tenant-metrics",
)

class SynapsePlugin(Plugin):
def get_function_signature_hook(
self, fullname: str
) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
if fullname in (
"prometheus_client.metrics.Counter",
"prometheus_client.metrics.Histogram",
"prometheus_client.metrics.Gauge",
# TODO: Add other prometheus_client metrics that need checking as we
# refactor, see https://github.com/element-hq/synapse/issues/18592
):
return check_prometheus_metric_instantiation

return None

def get_method_signature_hook(
self, fullname: str
) -> Optional[Callable[[MethodSigContext], CallableType]]:
@@ -65,6 +90,85 @@ class SynapsePlugin(Plugin):
return None

def check_prometheus_metric_instantiation(ctx: FunctionSigContext) -> CallableType:
"""
Ensure that the `prometheus_client` metrics include the `SERVER_NAME_LABEL` label
when instantiated.

This is important because we support multiple Synapse instances running in the same
process, where all metrics share a single global `REGISTRY`. The `server_name` label
ensures metrics are correctly separated by homeserver.

There are also some metrics that apply at the process level, such as CPU usage,
Python garbage collection, and Twisted reactor tick time, which shouldn't have the
`SERVER_NAME_LABEL`. In those cases, use a type ignore comment to disable the
check, e.g. `# type: ignore[missing-server-name-label]`.
"""
# The true signature, this isn't being modified so this is what will be returned.
signature: CallableType = ctx.default_signature

# Sanity check the arguments are still as expected in this version of
# `prometheus_client`. ex. `Counter(name, documentation, labelnames, ...)`
#
# `signature.arg_names` should be: ["name", "documentation", "labelnames", ...]
if len(signature.arg_names) < 3 or signature.arg_names[2] != "labelnames":
ctx.api.fail(
f"Expected the 3rd argument of {signature.name} to be 'labelnames', but got "
f"{signature.arg_names[2]}",
ctx.context,
)
return signature

# Ensure mypy is passing the correct number of arguments because we are doing some
# dirty indexing into `ctx.args` later on.
assert len(ctx.args) == len(signature.arg_names), (
f"Expected the list of arguments in the {signature.name} signature ({len(signature.arg_names)})"
f"to match the number of arguments from the function signature context ({len(ctx.args)})"
)

# Check if the `labelnames` argument includes `SERVER_NAME_LABEL`
#
# `ctx.args` should look like this:
# ```
# [
#     [StrExpr("name")],
#     [StrExpr("documentation")],
#     [ListExpr([StrExpr("label1"), StrExpr("label2")])]
# ...
# ]
# ```
labelnames_arg_expression = ctx.args[2][0] if len(ctx.args[2]) > 0 else None
if isinstance(labelnames_arg_expression, (ListExpr, TupleExpr)):
# Check if the `labelnames` argument includes the `server_name` label (`SERVER_NAME_LABEL`).
for labelname_expression in labelnames_arg_expression.items:
if (
isinstance(labelname_expression, NameExpr)
and labelname_expression.fullname == "synapse.metrics.SERVER_NAME_LABEL"
):
# Found the `SERVER_NAME_LABEL`, all good!
break
else:
ctx.api.fail(
f"Expected {signature.name} to include `SERVER_NAME_LABEL` in the list of labels. "
"If this is a process-level metric (vs homeserver-level), use a type ignore comment "
"to disable this check.",
ctx.context,
code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
)
else:
ctx.api.fail(
f"Expected the `labelnames` argument of {signature.name} to be a list of label names "
f"(including `SERVER_NAME_LABEL`), but got {labelnames_arg_expression}. "
"If this is a process-level metric (vs homeserver-level), use a type ignore comment "
"to disable this check.",
ctx.context,
code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
)
return signature

return signature
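To illustrate what this plugin hook enforces, here is a hedged sketch of metric definitions it would accept and reject (the metric names are invented for the example):

```python
from prometheus_client import Counter

from synapse.metrics import SERVER_NAME_LABEL

# Accepted: SERVER_NAME_LABEL appears in the labelnames list.
example_counter = Counter(
    "synapse_example_requests",
    "An example per-homeserver counter",
    labelnames=["result", SERVER_NAME_LABEL],
)

# Rejected with `missing-server-name-label` (kept commented so the snippet
# itself would type-check):
# bad_counter = Counter("synapse_example_bad", "No server_name", ["result"])

# Process-level metrics opt out explicitly:
# gc_counter = Counter("synapse_example_gc", "Process-wide", ["gen"])  # type: ignore[missing-server-name-label]
```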

def _get_true_return_type(signature: CallableType) -> mypy.types.Type:
"""
Get the "final" return type of a callable which might return an Awaitable/Deferred.

@@ -36,11 +36,11 @@ from typing import Any, List, Match, Optional, Union

import attr
import click
import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
from markdown_it import MarkdownIt
from packaging import version

@@ -851,7 +851,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:

# First we parse the changelog so that we can split it into sections based
# on the release headings.
ast = commonmark.Parser().parse(changes)
tokens = MarkdownIt().parse(changes)

@attr.s(auto_attribs=True)
class VersionSection:
@@ -862,19 +862,22 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
end_line: Optional[int] = None  # Is None if it's the last entry

headings: List[VersionSection] = []
for node, _ in ast.walker():
# We look for all text nodes that are in a level 1 heading.
if node.t != "text":
for i, token in enumerate(tokens):
# We look for level 1 headings (h1 tags).
if token.type != "heading_open" or token.tag != "h1":
continue

if node.parent.t != "heading" or node.parent.level != 1:
continue
# The next token should be an inline token containing the heading text
if i + 1 < len(tokens) and tokens[i + 1].type == "inline":
heading_text = tokens[i + 1].content
# The map property contains [line_begin, line_end] (0-based)
start_line = token.map[0] if token.map else 0

# If we have a previous heading then we update its `end_line`.
if headings:
headings[-1].end_line = node.parent.sourcepos[0][0] - 1
# If we have a previous heading then we update its `end_line`.
if headings:
headings[-1].end_line = start_line

headings.append(VersionSection(node.literal, node.parent.sourcepos[0][0] - 1))
headings.append(VersionSection(heading_text, start_line))

changes_by_line = changes.split("\n")
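For reference, a self-contained sketch of the markdown-it token walk the new code relies on (the sample changelog text is invented):

```python
from markdown_it import MarkdownIt

changes = "# Synapse 1.2.0 (2025-01-01)\n\n- Fix a bug.\n\n# Synapse 1.1.0\n\n- A feature.\n"
tokens = MarkdownIt().parse(changes)
for i, token in enumerate(tokens):
    if token.type == "heading_open" and token.tag == "h1":
        # The following inline token carries the heading text; `map` holds
        # the 0-based [line_begin, line_end) span of the heading.
        print(tokens[i + 1].content, token.map)
```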

@@ -45,16 +45,6 @@ if py_version < (3, 9):

# Allow using the asyncio reactor via env var.
if strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")):
from incremental import Version

import twisted

# We need a bugfix that is included in Twisted 21.2.0:
# https://twistedmatrix.com/trac/ticket/9787
if twisted.version < Version("Twisted", 21, 2, 0):
print("Using asyncio reactor requires Twisted>=21.2.0")
sys.exit(1)

import asyncio

from twisted.internet import asyncioreactor
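With the Twisted minimum long since past 21.2.0, the version guard is removed and the remaining logic reduces roughly to installing the reactor (a sketch; the install must run before `twisted.internet.reactor` is first imported):

```python
import asyncio

from twisted.internet import asyncioreactor

# Installs an asyncio-backed reactor as the global Twisted reactor.
asyncioreactor.install(asyncio.new_event_loop())
```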

@@ -48,6 +48,7 @@ if TYPE_CHECKING or HAS_PYDANTIC_V2:
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.v1.error_wrappers import ErrorWrapper
@@ -68,6 +69,7 @@ else:
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.error_wrappers import ErrorWrapper
@@ -92,4 +94,5 @@ __all__ = (
"StrictStr",
"ValidationError",
"validator",
"root_validator",
)
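A hedged sketch of why `root_validator` is being re-exported: unlike per-field `validator` hooks, it validates across fields. The model below is invented for illustration, and it assumes `BaseModel` is also re-exported by the same compat module:

```python
from synapse._pydantic_compat import BaseModel, root_validator


class KeyPair(BaseModel):
    public_key: str = ""
    private_key: str = ""

    @root_validator
    def require_both_or_neither(cls, values: dict) -> dict:
        if bool(values.get("public_key")) != bool(values.get("private_key")):
            raise ValueError("public_key and private_key must be set together")
        return values
```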

@@ -29,19 +29,21 @@ import attr

from synapse.config._base import (
Config,
ConfigError,
RootConfig,
find_config_files,
read_config_files,
)
from synapse.config.database import DatabaseConfig
from synapse.config.server import ServerConfig
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.engines import create_engine

class ReviewConfig(RootConfig):
"A config class that just pulls out the database config"
"A config class that just pulls out the server and database config"

config_classes = [DatabaseConfig]
config_classes = [ServerConfig, DatabaseConfig]

@attr.s(auto_attribs=True)
@@ -148,6 +150,10 @@ def main() -> None:
config_dict = read_config_files(config_files)
config.parse_config_dict(config_dict, "", "")

server_name = config.server.server_name
if not isinstance(server_name, str):
raise ConfigError("Must be a string", ("server_name",))

since_ms = time.time() * 1000 - Config.parse_duration(config_args.since)
exclude_users_with_email = config_args.exclude_emails
exclude_users_with_appservice = config_args.exclude_app_service
@@ -159,7 +165,12 @@ def main() -> None:

engine = create_engine(database_config.config)

with make_conn(database_config, engine, "review_recent_signups") as db_conn:
with make_conn(
db_config=database_config,
engine=engine,
default_txn_name="review_recent_signups",
server_name=server_name,
) as db_conn:
# This generates a type of Cursor, not LoggingTransaction.
user_infos = get_recent_users(
db_conn.cursor(),

@@ -136,6 +136,7 @@ BOOLEAN_COLUMNS = {
"has_known_state",
"is_encrypted",
],
"thread_subscriptions": ["subscribed", "automatic"],
"users": ["shadow_banned", "approved", "locked", "suspended"],
"un_partial_stated_event_stream": ["rejection_status_changed"],
"users_who_share_rooms": ["share_private"],
@@ -190,13 +191,18 @@ APPEND_ONLY_TABLES = [
"users",
]

# These tables declare their id column with "PRIMARY KEY AUTOINCREMENT" on sqlite side
# and with "PRIMARY KEY GENERATED ALWAYS AS IDENTITY" on postgres side. This creates an
# implicit sequence that needs its value to be migrated separately. Additionally,
# inserting on postgres side needs to use the "OVERRIDING SYSTEM VALUE" modifier.
AUTOINCREMENT_TABLES = {
"sliding_sync_connections",
"sliding_sync_connection_positions",
"sliding_sync_connection_required_state",
"state_groups_pending_deletion",
}

IGNORED_TABLES = {
# Porting the auto generated sequence in this table is non-trivial.
# None of the entries in this list are mandatory for Synapse to keep working.
# If state group disk space is an issue after the port, the
# `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
"state_groups_pending_deletion",
# We don't port these tables, as they're a faff and we can regenerate
# them anyway.
"user_directory",
@@ -284,11 +290,17 @@ class Store(
return self.db_pool.runInteraction("execute_sql", r)

def insert_many_txn(
self, txn: LoggingTransaction, table: str, headers: List[str], rows: List[Tuple]
self,
txn: LoggingTransaction,
table: str,
headers: List[str],
rows: List[Tuple],
override_system_value: bool = False,
) -> None:
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
table,
", ".join(k for k in headers),
"OVERRIDING SYSTEM VALUE" if override_system_value else "",
", ".join("%s" for _ in headers),
)
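The reason for the new modifier, in sketch form: Postgres columns declared `GENERATED ALWAYS AS IDENTITY` reject explicit values unless the `INSERT` opts in. The helper below restates the string construction above so the resulting SQL shape is visible (illustrative, not part of the script):

```python
from typing import List


def build_insert_sql(table: str, headers: List[str], override_system_value: bool) -> str:
    return "INSERT INTO %s (%s) %s VALUES (%s)" % (
        table,
        ", ".join(headers),
        "OVERRIDING SYSTEM VALUE" if override_system_value else "",
        ", ".join("%s" for _ in headers),
    )


# build_insert_sql("sliding_sync_connections", ["connection_key", "user_id"], True)
# -> "INSERT INTO sliding_sync_connections (connection_key, user_id) OVERRIDING SYSTEM VALUE VALUES (%s, %s)"
```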

@@ -532,7 +544,13 @@ class Porter:

def insert(txn: LoggingTransaction) -> None:
assert headers is not None
self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
self.postgres_store.insert_many_txn(
txn,
table,
headers[1:],
rows,
override_system_value=table in AUTOINCREMENT_TABLES,
)

self.postgres_store.db_pool.simple_update_one_txn(
txn,
@@ -654,8 +672,14 @@ class Porter:
engine = create_engine(db_config.config)

hs = MockHomeserver(self.hs_config)
server_name = hs.hostname

with make_conn(db_config, engine, "portdb") as db_conn:
with make_conn(
db_config=db_config,
engine=engine,
default_txn_name="portdb",
server_name=server_name,
) as db_conn:
engine.check_database(
db_conn, allow_outdated_version=allow_outdated_version
)
@@ -884,6 +908,19 @@ class Porter:
],
)

await self._setup_autoincrement_sequence(
"sliding_sync_connection_positions", "connection_position"
)
await self._setup_autoincrement_sequence(
"sliding_sync_connection_required_state", "required_state_id"
)
await self._setup_autoincrement_sequence(
"sliding_sync_connections", "connection_key"
)
await self._setup_autoincrement_sequence(
"state_groups_pending_deletion", "sequence_number"
)

# Step 3. Get tables.
self.progress.set_state("Fetching tables")
sqlite_tables = await self.sqlite_store.db_pool.simple_select_onecol(
@@ -1216,6 +1253,49 @@ class Porter:
"_setup_%s" % (sequence_name,), r
)

async def _setup_autoincrement_sequence(
self,
sqlite_table_name: str,
sqlite_id_column_name: str,
) -> None:
"""Set a sequence to the correct value. Use where id column was declared with PRIMARY KEY AUTOINCREMENT."""
seq_name = await self._pg_get_serial_sequence(
sqlite_table_name, sqlite_id_column_name
)
if seq_name is None:
raise Exception(
"implicit sequence not found for table " + sqlite_table_name
)

seq_value = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="sqlite_sequence",
keyvalues={"name": sqlite_table_name},
retcol="seq",
allow_none=True,
)
if seq_value is None:
return

def r(txn: LoggingTransaction) -> None:
sql = "ALTER SEQUENCE %s RESTART WITH" % (seq_name,)
txn.execute(sql + " %s", (seq_value + 1,))

await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)

async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
"""Returns the name of the postgres sequence associated with a column, or NULL."""

def r(txn: LoggingTransaction) -> Optional[str]:
txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
result = txn.fetchone()
if not result:
return None
return result[0]

return await self.postgres_store.db_pool.runInteraction(
"_pg_get_serial_sequence", r
)
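A rough standalone equivalent of the two helpers above, assuming a psycopg2-style cursor `cur` and the last SQLite-issued id in `seq_value` (both assumptions, not names from the script):

```python
# Look up the implicit sequence behind the identity column.
cur.execute(
    "SELECT pg_get_serial_sequence(%s, %s)",
    ("sliding_sync_connections", "connection_key"),
)
seq_name = cur.fetchone()[0]

# Sequence names cannot be bound parameters, hence the string formatting;
# the numeric value is still passed as a parameter (interpolated client-side).
sql = "ALTER SEQUENCE %s RESTART WITH" % (seq_name,)
cur.execute(sql + " %s", (seq_value + 1,))
```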

async def _setup_auth_chain_sequence(self) -> None:
curr_chain_id: Optional[
int

@@ -53,6 +53,7 @@ class MockHomeserver(HomeServer):

def run_background_updates(hs: HomeServer) -> None:
server_name = hs.hostname
main = hs.get_datastores().main
state = hs.get_datastores().state

@@ -66,7 +67,11 @@ def run_background_updates(hs: HomeServer) -> None:
def run() -> None:
# Apply all background updates on the database.
defer.ensureDeferred(
run_as_background_process("background_updates", run_background_updates)
run_as_background_process(
"background_updates",
server_name,
run_background_updates,
)
)

reactor.callWhenRunning(run)

@@ -172,7 +172,7 @@ class BaseAuth:
"""

# It's ok if the app service is trying to use the sender from their registration
if app_service.sender == user_id:
if app_service.sender.to_string() == user_id:
pass
# Check to make sure the app service is allowed to control the user
elif not app_service.is_interested_in_user(user_id):

@@ -47,6 +47,7 @@ from synapse.logging.opentracing import (
inject_request_headers,
start_active_span,
)
from synapse.metrics import SERVER_NAME_LABEL
from synapse.synapse_rust.http_client import HttpClient
from synapse.types import Requester, UserID, create_requester
from synapse.util import json_decoder
@@ -62,7 +63,7 @@ logger = logging.getLogger(__name__)
introspection_response_timer = Histogram(
"synapse_api_auth_delegated_introspection_response",
"Time taken to get a response for an introspection request",
["code"],
labelnames=["code", SERVER_NAME_LABEL],
)
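Naming the labels (rather than relying on positional order) means each observation below must spell out both the response code and the homeserver, along the lines of this sketch (values illustrative; assumes the `SERVER_NAME_LABEL` import shown above):

```python
introspection_response_timer.labels(
    code=200, **{SERVER_NAME_LABEL: "example.com"}
).observe(0.123)
```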

@@ -176,6 +177,7 @@ class MSC3861DelegatedAuth(BaseAuth):
assert self._config.client_id, "No client_id provided"
assert auth_method is not None, "Invalid client_auth_method provided"

self.server_name = hs.hostname
self._clock = hs.get_clock()
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
@@ -183,7 +185,8 @@ class MSC3861DelegatedAuth(BaseAuth):
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

self._rust_http_client = HttpClient(
user_agent=self._http_client.user_agent.decode("utf8")
reactor=hs.get_reactor(),
user_agent=self._http_client.user_agent.decode("utf8"),
)

# # Token Introspection Cache
@@ -206,8 +209,9 @@ class MSC3861DelegatedAuth(BaseAuth):
# In this case, the device still exists and it's not the end of the world for
# the old access token to continue working for a short time.
self._introspection_cache: ResponseCache[str] = ResponseCache(
self._clock,
"token_introspection",
clock=self._clock,
name="token_introspection",
server_name=self.server_name,
timeout_ms=120_000,
# don't log because the keys are access tokens
enable_logging=False,
@@ -338,17 +342,23 @@ class MSC3861DelegatedAuth(BaseAuth):
)
except HttpResponseException as e:
end_time = self._clock.time()
introspection_response_timer.labels(e.code).observe(end_time - start_time)
introspection_response_timer.labels(
code=e.code, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
raise
except Exception:
end_time = self._clock.time()
introspection_response_timer.labels("ERR").observe(end_time - start_time)
introspection_response_timer.labels(
code="ERR", **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
raise

logger.debug("Fetched token from MAS")

end_time = self._clock.time()
introspection_response_timer.labels(200).observe(end_time - start_time)
introspection_response_timer.labels(
code=200, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)

resp = json_decoder.decode(resp_body.decode("utf-8"))

@@ -366,6 +376,12 @@ class MSC3861DelegatedAuth(BaseAuth):
async def is_server_admin(self, requester: Requester) -> bool:
return "urn:synapse:admin:*" in requester.scope

def _is_access_token_the_admin_token(self, token: str) -> bool:
admin_token = self._admin_token()
if admin_token is None:
return False
return token == admin_token

async def get_user_by_req(
self,
request: SynapseRequest,
@@ -431,7 +447,7 @@ class MSC3861DelegatedAuth(BaseAuth):
requester = await self.get_user_by_access_token(access_token, allow_expired)

# Do not record requests from MAS using the virtual `__oidc_admin` user.
if access_token != self._admin_token():
if not self._is_access_token_the_admin_token(access_token):
await self._record_request(request, requester)

if not allow_guest and requester.is_guest:
@@ -467,17 +483,29 @@ class MSC3861DelegatedAuth(BaseAuth):

raise UnrecognizedRequestError(code=404)

def is_request_using_the_admin_token(self, request: SynapseRequest) -> bool:
"""
Check if the request is using the admin token.

Args:
request: The request to check.

Returns:
True if the request is using the admin token, False otherwise.
"""
access_token = self.get_access_token_from_request(request)
return self._is_access_token_the_admin_token(access_token)

async def get_user_by_access_token(
self,
token: str,
allow_expired: bool = False,
) -> Requester:
admin_token = self._admin_token()
if admin_token is not None and token == admin_token:
if self._is_access_token_the_admin_token(token):
# XXX: This is a temporary solution so that the admin API can be called by
# the OIDC provider. This will be removed once we have OIDC client
# credentials grant support in matrix-authentication-service.
logger.info("Admin toked used")
logger.info("Admin token used")
# XXX: that user doesn't exist and won't be provisioned.
# This is mostly fine for admin calls, but we should also think about doing
# requesters without a user_id.

@@ -262,6 +262,11 @@ class EventContentFields:

TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"

# Used in m.room.topic events.
TOPIC: Final = "topic"
M_TOPIC: Final = "m.topic"
M_TEXT: Final = "m.text"

class EventUnsignedContentFields:
"""Fields found inside the 'unsigned' data on events"""
@@ -270,6 +275,13 @@ class EventUnsignedContentFields:
MEMBERSHIP: Final = "membership"

class MTextFields:
"""Fields found inside m.text content blocks."""

BODY: Final = "body"
MIMETYPE: Final = "mimetype"

class RoomTypes:
"""Understood values of the room_type field of m.room.create events."""

@@ -290,6 +302,9 @@ class AccountDataTypes:
MSC4155_INVITE_PERMISSION_CONFIG: Final = (
"org.matrix.msc4155.invite_permission_config"
)
# Synapse-specific behaviour. See "Client-Server API Extensions" documentation
# in Admin API for more information.
SYNAPSE_ADMIN_CLIENT_CONFIG: Final = "io.element.synapse.admin_client_config"

class HistoryVisibility:

@@ -75,7 +75,7 @@ from synapse.http.site import SynapseSite
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers
from synapse.module_api.callbacks.third_party_event_rules_callbacks import (
@@ -286,6 +286,16 @@ def register_start(
def listen_metrics(bind_addresses: StrCollection, port: int) -> None:
"""
Start Prometheus metrics server.

This method runs the metrics server on a different port, in a different thread to
Synapse. This can make it more resilient to heavy load in Synapse causing metric
requests to be slow or timeout.

Even though `start_http_server_prometheus(...)` uses `threading.Thread` behind the
scenes (where all threads share the GIL and only one thread can execute Python
bytecode at a time), this still works because the metrics thread can preempt the
Twisted reactor thread between bytecode boundaries and the metrics thread gets
scheduled with roughly equal priority to the Twisted reactor thread.
"""
from prometheus_client import start_http_server as start_http_server_prometheus

@@ -293,32 +303,9 @@ def listen_metrics(bind_addresses: StrCollection, port: int) -> None:

for host in bind_addresses:
logger.info("Starting metrics listener on %s:%d", host, port)
_set_prometheus_client_use_created_metrics(False)
start_http_server_prometheus(port, addr=host, registry=RegistryProxy)
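A minimal standalone sketch of the serving model described in the docstring: prometheus_client answers scrapes from its own daemon thread, so a busy reactor thread does not block `/metrics` (port and address here are illustrative):

```python
from prometheus_client import REGISTRY, start_http_server

# Spawns an HTTP server thread that serves the given registry.
start_http_server(9101, addr="127.0.0.1", registry=REGISTRY)
```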

def _set_prometheus_client_use_created_metrics(new_value: bool) -> None:
"""
Sets whether prometheus_client should expose `_created`-suffixed metrics for
all gauges, histograms and summaries.
There is no programmatic way to disable this without poking at internals;
the proper way is to use an environment variable which prometheus_client
loads at import time.

The motivation for disabling these `_created` metrics is that they're
a waste of space as they're not useful but they take up space in Prometheus.
"""

import prometheus_client.metrics

if hasattr(prometheus_client.metrics, "_use_created"):
prometheus_client.metrics._use_created = new_value
else:
logger.error(
"Can't disable `_created` metrics in prometheus_client (brittle hack broken?)"
)

def listen_manhole(
bind_addresses: StrCollection,
port: int,
@@ -525,6 +512,7 @@ async def start(hs: "HomeServer") -> None:
Args:
hs: homeserver instance
"""
server_name = hs.hostname
reactor = hs.get_reactor()

# We want to use a separate thread pool for the resolver so that large
@@ -537,22 +525,34 @@ async def start(hs: "HomeServer") -> None:
)

# Register the threadpools with our metrics.
register_threadpool("default", reactor.getThreadPool())
register_threadpool("gai_resolver", resolver_threadpool)
register_threadpool(
name="default", server_name=server_name, threadpool=reactor.getThreadPool()
)
register_threadpool(
name="gai_resolver", server_name=server_name, threadpool=resolver_threadpool
)

# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):

@wrap_as_background_process("sighup")
async def handle_sighup(*args: Any, **kwargs: Any) -> None:
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")
def handle_sighup(*args: Any, **kwargs: Any) -> "defer.Deferred[None]":
async def _handle_sighup(*args: Any, **kwargs: Any) -> None:
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")

for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)
for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)

sdnotify(b"READY=1")
sdnotify(b"READY=1")

return run_as_background_process(
"sighup",
server_name,
_handle_sighup,
*args,
**kwargs,
)

# We defer running the sighup handlers until next reactor tick. This
# is so that we're in a sane state, e.g. flushing the logs may fail
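The sighup hunk is one instance of a refactor pattern repeated throughout this compare: a `@wrap_as_background_process` decorator (which implied the process name) becomes an explicit `run_as_background_process` call that also threads through the homeserver name. A hedged sketch of the shape, with a stubbed body:

```python
from twisted.internet import defer

from synapse.metrics.background_process_metrics import run_as_background_process


def trigger_sighup(server_name: str) -> "defer.Deferred[None]":
    async def _handle_sighup() -> None:
        # ... the original async body runs here ...
        pass

    return run_as_background_process("sighup", server_name, _handle_sighup)
```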

@@ -104,6 +104,9 @@ from synapse.storage.databases.main.stats import StatsStore
from synapse.storage.databases.main.stream import StreamWorkerStore
from synapse.storage.databases.main.tags import TagsWorkerStore
from synapse.storage.databases.main.task_scheduler import TaskSchedulerWorkerStore
from synapse.storage.databases.main.thread_subscriptions import (
ThreadSubscriptionsWorkerStore,
)
from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
@@ -118,7 +121,6 @@ class GenericWorkerStore(
# FIXME(https://github.com/matrix-org/synapse/issues/3714): We need to add
# UserDirectoryStore as we write directly rather than going via the correct worker.
UserDirectoryStore,
StatsStore,
UIAuthWorkerStore,
EndToEndRoomKeyStore,
PresenceStore,
@@ -133,6 +135,7 @@ class GenericWorkerStore(
KeyStore,
RoomWorkerStore,
DirectoryWorkerStore,
ThreadSubscriptionsWorkerStore,
PushRulesWorkerStore,
ApplicationServiceTransactionWorkerStore,
ApplicationServiceWorkerStore,
@@ -154,6 +157,7 @@ class GenericWorkerStore(
StreamWorkerStore,
EventsWorkerStore,
RegistrationWorkerStore,
StatsStore,
SearchStore,
TransactionWorkerStore,
LockStore,

@@ -26,7 +26,12 @@ from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple

from prometheus_client import Gauge

from synapse.metrics.background_process_metrics import wrap_as_background_process
from twisted.internet import defer

from synapse.metrics import SERVER_NAME_LABEL
from synapse.metrics.background_process_metrics import (
run_as_background_process,
)
from synapse.types import JsonDict
from synapse.util.constants import ONE_HOUR_SECONDS, ONE_MINUTE_SECONDS

@@ -53,138 +58,158 @@ Phone home stats are sent every 3 hours
_stats_process: List[Tuple[int, "resource.struct_rusage"]] = []

# Gauges to expose monthly active user control metrics
current_mau_gauge = Gauge("synapse_admin_mau_current", "Current MAU")
current_mau_gauge = Gauge(
"synapse_admin_mau_current",
"Current MAU",
labelnames=[SERVER_NAME_LABEL],
)
current_mau_by_service_gauge = Gauge(
"synapse_admin_mau_current_mau_by_service",
"Current MAU by service",
["app_service"],
labelnames=["app_service", SERVER_NAME_LABEL],
)
max_mau_gauge = Gauge(
"synapse_admin_mau_max",
"MAU Limit",
labelnames=[SERVER_NAME_LABEL],
)
max_mau_gauge = Gauge("synapse_admin_mau_max", "MAU Limit")
registered_reserved_users_mau_gauge = Gauge(
"synapse_admin_mau_registered_reserved_users",
"Registered users with reserved threepids",
labelnames=[SERVER_NAME_LABEL],
)

@wrap_as_background_process("phone_stats_home")
async def phone_stats_home(
def phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> None:
"""Collect usage statistics and send them to the configured endpoint.
) -> "defer.Deferred[None]":
server_name = hs.hostname

Args:
hs: the HomeServer object to use for gathering usage data.
stats: the dict in which to store the statistics sent to the configured
endpoint. Mostly used in tests to figure out the data that is supposed to
be sent.
stats_process: statistics about resource usage of the process.
"""
async def _phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> None:
"""Collect usage statistics and send them to the configured endpoint.

logger.info("Gathering stats for reporting")
now = int(hs.get_clock().time())
# Ensure the homeserver has started.
assert hs.start_time is not None
uptime = int(now - hs.start_time)
if uptime < 0:
uptime = 0
Args:
hs: the HomeServer object to use for gathering usage data.
stats: the dict in which to store the statistics sent to the configured
endpoint. Mostly used in tests to figure out the data that is supposed to
be sent.
stats_process: statistics about resource usage of the process.
"""

#
# Performance statistics. Keep this early in the function to maintain reliability of `test_performance_100` test.
#
old = stats_process[0]
new = (now, resource.getrusage(resource.RUSAGE_SELF))
stats_process[0] = new
logger.info("Gathering stats for reporting")
now = int(hs.get_clock().time())
# Ensure the homeserver has started.
assert hs.start_time is not None
uptime = int(now - hs.start_time)
if uptime < 0:
uptime = 0

# Get RSS in bytes
stats["memory_rss"] = new[1].ru_maxrss
#
# Performance statistics. Keep this early in the function to maintain reliability of `test_performance_100` test.
#
old = stats_process[0]
new = (now, resource.getrusage(resource.RUSAGE_SELF))
stats_process[0] = new

# Get CPU time in % of a single core, not % of all cores
used_cpu_time = (new[1].ru_utime + new[1].ru_stime) - (
old[1].ru_utime + old[1].ru_stime
)
if used_cpu_time == 0 or new[0] == old[0]:
stats["cpu_average"] = 0
else:
stats["cpu_average"] = math.floor(used_cpu_time / (new[0] - old[0]) * 100)
# Get RSS in bytes
stats["memory_rss"] = new[1].ru_maxrss

#
# General statistics
#

store = hs.get_datastores().main
common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()

stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now
stats["uptime_seconds"] = uptime
version = sys.version_info
stats["python_version"] = "{}.{}.{}".format(
version.major, version.minor, version.micro
)
stats["total_users"] = await store.count_all_users()

total_nonbridged_users = await store.count_nonbridged_users()
stats["total_nonbridged_users"] = total_nonbridged_users

daily_user_type_results = await store.count_daily_user_type()
for name, count in daily_user_type_results.items():
stats["daily_user_type_" + name] = count

room_count = await store.get_room_count()
stats["total_room_count"] = room_count

stats["daily_active_users"] = common_metrics.daily_active_users
stats["monthly_active_users"] = await store.count_monthly_users()
daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
stats["daily_e2ee_messages"] = await store.count_daily_e2ee_messages()
daily_sent_e2ee_messages = await store.count_daily_sent_e2ee_messages()
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
stats["daily_active_rooms"] = await store.count_daily_active_rooms()
stats["daily_messages"] = await store.count_daily_messages()
daily_sent_messages = await store.count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages

r30v2_results = await store.count_r30v2_users()
for name, count in r30v2_results.items():
stats["r30v2_users_" + name] = count

stats["cache_factor"] = hs.config.caches.global_factor
stats["event_cache_size"] = hs.config.caches.event_cache_size

#
# Database version
#

# This only reports info about the *main* database.
stats["database_engine"] = store.db_pool.engine.module.__name__
stats["database_server_version"] = store.db_pool.engine.server_version

#
# Logging configuration
#
synapse_logger = logging.getLogger("synapse")
log_level = synapse_logger.getEffectiveLevel()
stats["log_level"] = logging.getLevelName(log_level)

logger.info(
"Reporting stats to %s: %s", hs.config.metrics.report_stats_endpoint, stats
)
try:
await hs.get_proxied_http_client().put_json(
hs.config.metrics.report_stats_endpoint, stats
# Get CPU time in % of a single core, not % of all cores
used_cpu_time = (new[1].ru_utime + new[1].ru_stime) - (
old[1].ru_utime + old[1].ru_stime
)
except Exception as e:
logger.warning("Error reporting stats: %s", e)
if used_cpu_time == 0 or new[0] == old[0]:
stats["cpu_average"] = 0
else:
stats["cpu_average"] = math.floor(used_cpu_time / (new[0] - old[0]) * 100)

#
# General statistics
#

store = hs.get_datastores().main
common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()

stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now
stats["uptime_seconds"] = uptime
version = sys.version_info
stats["python_version"] = "{}.{}.{}".format(
version.major, version.minor, version.micro
)
stats["total_users"] = await store.count_all_users()

total_nonbridged_users = await store.count_nonbridged_users()
stats["total_nonbridged_users"] = total_nonbridged_users

daily_user_type_results = await store.count_daily_user_type()
for name, count in daily_user_type_results.items():
stats["daily_user_type_" + name] = count

room_count = await store.get_room_count()
stats["total_room_count"] = room_count

stats["daily_active_users"] = common_metrics.daily_active_users
stats["monthly_active_users"] = await store.count_monthly_users()
daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
stats["daily_e2ee_messages"] = await store.count_daily_e2ee_messages()
daily_sent_e2ee_messages = await store.count_daily_sent_e2ee_messages()
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
stats["daily_active_rooms"] = await store.count_daily_active_rooms()
stats["daily_messages"] = await store.count_daily_messages()
daily_sent_messages = await store.count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages

r30v2_results = await store.count_r30v2_users()
for name, count in r30v2_results.items():
stats["r30v2_users_" + name] = count

stats["cache_factor"] = hs.config.caches.global_factor
stats["event_cache_size"] = hs.config.caches.event_cache_size

#
# Database version
#

# This only reports info about the *main* database.
stats["database_engine"] = store.db_pool.engine.module.__name__
stats["database_server_version"] = store.db_pool.engine.server_version

#
# Logging configuration
#
synapse_logger = logging.getLogger("synapse")
log_level = synapse_logger.getEffectiveLevel()
stats["log_level"] = logging.getLevelName(log_level)

logger.info(
"Reporting stats to %s: %s", hs.config.metrics.report_stats_endpoint, stats
)
try:
await hs.get_proxied_http_client().put_json(
hs.config.metrics.report_stats_endpoint, stats
)
except Exception as e:
logger.warning("Error reporting stats: %s", e)

return run_as_background_process(
"phone_stats_home", server_name, _phone_stats_home, hs, stats, stats_process
)

def start_phone_stats_home(hs: "HomeServer") -> None:
"""
Start the background tasks which report phone home stats.
"""
server_name = hs.hostname
clock = hs.get_clock()

stats: JsonDict = {}
@@ -210,25 +235,39 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
)
hs.get_datastores().main.reap_monthly_active_users()

@wrap_as_background_process("generate_monthly_active_users")
async def generate_monthly_active_users() -> None:
current_mau_count = 0
current_mau_count_by_service: Mapping[str, int] = {}
reserved_users: Sized = ()
store = hs.get_datastores().main
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = (
await store.get_monthly_active_count_by_service()
def generate_monthly_active_users() -> "defer.Deferred[None]":
async def _generate_monthly_active_users() -> None:
current_mau_count = 0
current_mau_count_by_service: Mapping[str, int] = {}
reserved_users: Sized = ()
store = hs.get_datastores().main
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = (
await store.get_monthly_active_count_by_service()
)
reserved_users = await store.get_registered_reserved_users()
current_mau_gauge.labels(**{SERVER_NAME_LABEL: server_name}).set(
float(current_mau_count)
)
reserved_users = await store.get_registered_reserved_users()
current_mau_gauge.set(float(current_mau_count))

for app_service, count in current_mau_count_by_service.items():
current_mau_by_service_gauge.labels(app_service).set(float(count))
for app_service, count in current_mau_count_by_service.items():
current_mau_by_service_gauge.labels(
app_service=app_service, **{SERVER_NAME_LABEL: server_name}
).set(float(count))

registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
max_mau_gauge.set(float(hs.config.server.max_mau_value))
registered_reserved_users_mau_gauge.labels(
**{SERVER_NAME_LABEL: server_name}
).set(float(len(reserved_users)))
max_mau_gauge.labels(**{SERVER_NAME_LABEL: server_name}).set(
float(hs.config.server.max_mau_value)
)

return run_as_background_process(
"generate_monthly_active_users",
server_name,
_generate_monthly_active_users,
)

if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
generate_monthly_active_users()
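Usage of the relabelled gauges then looks like the following sketch (values and homeserver name illustrative; the gauges are the module-level ones defined earlier in the hunk):

```python
current_mau_gauge.labels(**{SERVER_NAME_LABEL: "example.com"}).set(42.0)
current_mau_by_service_gauge.labels(
    app_service="my_bridge", **{SERVER_NAME_LABEL: "example.com"}
).set(7.0)
```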

@@ -78,7 +78,7 @@ class ApplicationService:
self,
token: str,
id: str,
sender: str,
sender: UserID,
url: Optional[str] = None,
namespaces: Optional[JsonDict] = None,
hs_token: Optional[str] = None,
@@ -96,6 +96,8 @@ class ApplicationService:
self.hs_token = hs_token
# The full Matrix ID for this application service's sender.
self.sender = sender
# The application service user should be part of the server's domain.
self.server_name = sender.domain  # nb must be called this for @cached
self.namespaces = self._check_namespaces(namespaces)
self.id = id
self.ip_range_whitelist = ip_range_whitelist
@@ -223,7 +225,7 @@ class ApplicationService:
"""
return (
# User is the appservice's configured sender_localpart user
user_id == self.sender
user_id == self.sender.to_string()
# User is in the appservice's user namespace
or self.is_user_in_namespace(user_id)
)
@@ -347,7 +349,7 @@ class ApplicationService:
def is_exclusive_user(self, user_id: str) -> bool:
return (
self._is_exclusive(ApplicationService.NS_USERS, user_id)
or user_id == self.sender
or user_id == self.sender.to_string()
)

def is_interested_in_protocol(self, protocol: str) -> bool:

@@ -48,6 +48,7 @@ from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.logging import opentracing
from synapse.metrics import SERVER_NAME_LABEL
from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

@@ -59,29 +60,31 @@ logger = logging.getLogger(__name__)
sent_transactions_counter = Counter(
"synapse_appservice_api_sent_transactions",
"Number of /transactions/ requests sent",
["service"],
labelnames=["service", SERVER_NAME_LABEL],
)

failed_transactions_counter = Counter(
"synapse_appservice_api_failed_transactions",
"Number of /transactions/ requests that failed to send",
["service"],
labelnames=["service", SERVER_NAME_LABEL],
)

sent_events_counter = Counter(
"synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
"synapse_appservice_api_sent_events",
"Number of events sent to the AS",
labelnames=["service", SERVER_NAME_LABEL],
)

sent_ephemeral_counter = Counter(
"synapse_appservice_api_sent_ephemeral",
"Number of ephemeral events sent to the AS",
["service"],
labelnames=["service", SERVER_NAME_LABEL],
)

sent_todevice_counter = Counter(
"synapse_appservice_api_sent_todevice",
"Number of todevice messages sent to the AS",
["service"],
labelnames=["service", SERVER_NAME_LABEL],
)

HOUR_IN_MS = 60 * 60 * 1000
@@ -126,11 +129,15 @@ class ApplicationServiceApi(SimpleHttpClient):

def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.config = hs.config.appservice

self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
clock=hs.get_clock(),
name="as_protocol_meta",
server_name=self.server_name,
timeout_ms=HOUR_IN_MS,
)

def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]:
@@ -378,6 +385,7 @@ class ApplicationServiceApi(SimpleHttpClient):
"left": list(device_list_summary.left),
}

labels = {"service": service.id, SERVER_NAME_LABEL: self.server_name}
try:
args = None
if self.config.use_appservice_legacy_authorization:
@@ -395,10 +403,10 @@ class ApplicationServiceApi(SimpleHttpClient):
service.url,
[event.get("event_id") for event in events],
)
sent_transactions_counter.labels(service.id).inc()
sent_events_counter.labels(service.id).inc(len(serialized_events))
sent_ephemeral_counter.labels(service.id).inc(len(ephemeral))
sent_todevice_counter.labels(service.id).inc(len(to_device_messages))
sent_transactions_counter.labels(**labels).inc()
sent_events_counter.labels(**labels).inc(len(serialized_events))
sent_ephemeral_counter.labels(**labels).inc(len(ephemeral))
sent_todevice_counter.labels(**labels).inc(len(to_device_messages))
return True
except CodeMessageException as e:
logger.warning(
@@ -417,7 +425,7 @@ class ApplicationServiceApi(SimpleHttpClient):
ex.args,
exc_info=logger.isEnabledFor(logging.DEBUG),
)
failed_transactions_counter.labels(service.id).inc()
failed_transactions_counter.labels(**labels).inc()
return False
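Building the `labels` dict once and splatting it into every counter keeps the label set consistent across all of the appservice metrics; in sketch form (identifiers illustrative):

```python
labels = {"service": "my_bridge", SERVER_NAME_LABEL: "example.com"}
sent_transactions_counter.labels(**labels).inc()
failed_transactions_counter.labels(**labels).inc()
```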
|
||||
|
||||
async def claim_client_keys(
|
||||
@@ -555,6 +563,9 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
)
|
||||
and service.is_interested_in_user(e.state_key)
|
||||
),
|
||||
# Appservices are considered 'trusted' by the admin and should have
|
||||
# applicable metadata on their events.
|
||||
include_admin_metadata=True,
|
||||
),
|
||||
)
|
||||
for e in events
|
||||
|
||||
@@ -103,18 +103,16 @@ MAX_TO_DEVICE_MESSAGES_PER_TRANSACTION = 100
|
||||
|
||||
|
||||
class ApplicationServiceScheduler:
|
||||
"""Public facing API for this module. Does the required DI to tie the
|
||||
components together. This also serves as the "event_pool", which in this
|
||||
"""
|
||||
Public facing API for this module. Does the required dependency injection (DI) to
|
||||
tie the components together. This also serves as the "event_pool", which in this
|
||||
case is a simple array.
|
||||
"""
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.clock = hs.get_clock()
|
||||
self.txn_ctrl = _TransactionController(hs)
|
||||
self.store = hs.get_datastores().main
|
||||
self.as_api = hs.get_application_service_api()
|
||||
|
||||
self.txn_ctrl = _TransactionController(self.clock, self.store, self.as_api)
|
||||
self.queuer = _ServiceQueuer(self.txn_ctrl, self.clock, hs)
|
||||
self.queuer = _ServiceQueuer(self.txn_ctrl, hs)
|
||||
|
||||
async def start(self) -> None:
|
||||
logger.info("Starting appservice scheduler")
|
||||
@@ -184,9 +182,7 @@ class _ServiceQueuer:
|
||||
appservice at a given time.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, txn_ctrl: "_TransactionController", clock: Clock, hs: "HomeServer"
|
||||
):
|
||||
def __init__(self, txn_ctrl: "_TransactionController", hs: "HomeServer"):
|
||||
# dict of {service_id: [events]}
|
||||
self.queued_events: Dict[str, List[EventBase]] = {}
|
||||
# dict of {service_id: [events]}
|
||||
@@ -199,10 +195,11 @@ class _ServiceQueuer:
|
||||
# the appservices which currently have a transaction in flight
|
||||
self.requests_in_flight: Set[str] = set()
|
||||
self.txn_ctrl = txn_ctrl
|
||||
self.clock = clock
|
||||
self._msc3202_transaction_extensions_enabled: bool = (
|
||||
hs.config.experimental.msc3202_transaction_extensions
|
||||
)
|
||||
self.server_name = hs.hostname
|
||||
self.clock = hs.get_clock()
|
||||
self._store = hs.get_datastores().main
|
||||
|
||||
def start_background_request(self, service: ApplicationService) -> None:
|
||||
@@ -210,7 +207,9 @@ class _ServiceQueuer:
|
||||
if service.id in self.requests_in_flight:
|
||||
return
|
||||
|
||||
run_as_background_process("as-sender", self._send_request, service)
|
||||
run_as_background_process(
|
||||
"as-sender", self.server_name, self._send_request, service
|
||||
)
|
||||
|
||||
async def _send_request(self, service: ApplicationService) -> None:
|
||||
# sanity-check: we shouldn't get here if this service already has a sender
|
||||
@@ -319,7 +318,7 @@ class _ServiceQueuer:
|
||||
users: Set[str] = set()
|
||||
|
||||
# The sender is always included
|
||||
users.add(service.sender)
|
||||
users.add(service.sender.to_string())
|
||||
|
||||
# All AS users that would receive the PDUs or EDUs sent to these rooms
|
||||
# are classed as 'interesting'.
|
||||
@@ -359,10 +358,11 @@ class _TransactionController:
|
||||
(Note we have only have one of these in the homeserver.)
|
||||
"""
|
||||
|
||||
def __init__(self, clock: Clock, store: DataStore, as_api: ApplicationServiceApi):
|
||||
self.clock = clock
|
||||
self.store = store
|
||||
self.as_api = as_api
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.server_name = hs.hostname
|
||||
self.clock = hs.get_clock()
|
||||
self.store = hs.get_datastores().main
|
||||
self.as_api = hs.get_application_service_api()
|
||||
|
||||
# map from service id to recoverer instance
|
||||
self.recoverers: Dict[str, "_Recoverer"] = {}
|
||||
@@ -446,7 +446,12 @@ class _TransactionController:
|
||||
logger.info("Starting recoverer for AS ID %s", service.id)
|
||||
assert service.id not in self.recoverers
|
||||
recoverer = self.RECOVERER_CLASS(
|
||||
self.clock, self.store, self.as_api, service, self.on_recovered
|
||||
self.server_name,
|
||||
self.clock,
|
||||
self.store,
|
||||
self.as_api,
|
||||
service,
|
||||
self.on_recovered,
|
||||
)
|
||||
self.recoverers[service.id] = recoverer
|
||||
recoverer.recover()
|
||||
@@ -477,21 +482,24 @@ class _Recoverer:
     We have one of these for each appservice which is currently considered DOWN.

     Args:
-        clock (synapse.util.Clock):
-        store (synapse.storage.DataStore):
-        as_api (synapse.appservice.api.ApplicationServiceApi):
-        service (synapse.appservice.ApplicationService): the service we are managing
-        callback (callable[_Recoverer]): called once the service recovers.
+        server_name: the homeserver name, used to label metrics (this should be `hs.hostname`).
+        clock:
+        store:
+        as_api:
+        service: the service we are managing
+        callback: called once the service recovers.
     """

     def __init__(
         self,
+        server_name: str,
         clock: Clock,
         store: DataStore,
         as_api: ApplicationServiceApi,
         service: ApplicationService,
         callback: Callable[["_Recoverer"], Awaitable[None]],
     ):
+        self.server_name = server_name
         self.clock = clock
         self.store = store
         self.as_api = as_api

@@ -504,7 +512,11 @@ class _Recoverer:
         delay = 2**self.backoff_counter
         logger.info("Scheduling retries on %s in %fs", self.service.id, delay)
         self.scheduled_recovery = self.clock.call_later(
-            delay, run_as_background_process, "as-recoverer", self.retry
+            delay,
+            run_as_background_process,
+            "as-recoverer",
+            self.server_name,
+            self.retry,
         )

     def _backoff(self) -> None:

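The recovery delay above is a plain exponential backoff: `2**backoff_counter` seconds per consecutive failure. A quick illustration of the schedule this produces (the cap value is an assumption for this sketch; the real cap lives in `_backoff`):

def recovery_delays(failures: int, max_counter: int = 9) -> list:
    # 2s, 4s, 8s, ... doubling per failure, capped so the delay stops growing.
    return [2 ** min(n, max_counter) for n in range(1, failures + 1)]

print(recovery_delays(5))  # [2, 4, 8, 16, 32]
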
@@ -525,6 +537,7 @@ class _Recoverer:
         # Run a retry, which will reschedule a recovery if it fails.
         run_as_background_process(
             "retry",
+            self.server_name,
             self.retry,
         )

@@ -909,7 +909,10 @@ class RootConfig:


 def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
-    """Read the config files into a dict
+    """Read the config files and shallowly merge them into a dict.
+
+    Successive configurations are shallowly merged into ones provided earlier,
+    i.e., entirely replacing top-level sections of the configuration.

     Args:
         config_files: A list of the config files to read

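"Shallowly merged" here means a later file replaces whole top-level sections of an earlier one; nested keys are not merged. A minimal sketch of that semantics (plain `dict` update, not the actual implementation):

base = {"server_name": "example.com", "database": {"name": "sqlite3", "args": {"db": "/data/hs.db"}}}
override = {"database": {"name": "psycopg2"}}

merged = {**base, **override}
print(merged["database"])     # {'name': 'psycopg2'} -- the nested 'args' section is gone
print(merged["server_name"])  # example.com -- untouched top-level keys survive
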
@@ -122,8 +122,7 @@ def _load_appservice(
     localpart = as_info["sender_localpart"]
     if urlparse.quote(localpart) != localpart:
         raise ValueError("sender_localpart needs characters which are not URL encoded.")
-    user = UserID(localpart, hostname)
-    user_id = user.to_string()
+    user_id = UserID(localpart, hostname)

     # Rate limiting for users of this AS is on by default (excludes sender)
     rate_limited = as_info.get("rate_limited")

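The change keeps the structured `UserID` rather than flattening it to a string up front. For orientation, a Matrix user ID has the form `@localpart:domain`; a toy stand-in for `synapse.types.UserID` (illustrative only, not the real class):

class ToyUserID:
    """Illustrative stand-in for synapse.types.UserID."""

    def __init__(self, localpart: str, domain: str) -> None:
        self.localpart, self.domain = localpart, domain

    def to_string(self) -> str:
        return f"@{self.localpart}:{self.domain}"

print(ToyUserID("_irc_bridge", "example.com").to_string())  # @_irc_bridge:example.com
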
@@ -23,7 +23,17 @@ from typing import Any

 from synapse.types import JsonDict

-from ._base import Config, ConfigError
+from ._base import Config, ConfigError, read_file
+
+CONFLICTING_RECAPTCHA_PRIVATE_KEY_OPTS_ERROR = """\
+You have configured both `recaptcha_private_key` and
+`recaptcha_private_key_path`. These are mutually incompatible.
+"""
+
+CONFLICTING_RECAPTCHA_PUBLIC_KEY_OPTS_ERROR = """\
+You have configured both `recaptcha_public_key` and `recaptcha_public_key_path`.
+These are mutually incompatible.
+"""


 class CaptchaConfig(Config):

@@ -38,6 +48,13 @@ class CaptchaConfig(Config):
                 "Config options that expect an in-line secret as value are disabled",
                 ("recaptcha_private_key",),
             )
+        recaptcha_private_key_path = config.get("recaptcha_private_key_path")
+        if recaptcha_private_key_path:
+            if recaptcha_private_key:
+                raise ConfigError(CONFLICTING_RECAPTCHA_PRIVATE_KEY_OPTS_ERROR)
+            recaptcha_private_key = read_file(
+                recaptcha_private_key_path, ("recaptcha_private_key_path",)
+            ).strip()
         if recaptcha_private_key is not None and not isinstance(
             recaptcha_private_key, str
         ):

@@ -50,6 +67,13 @@ class CaptchaConfig(Config):
                 "Config options that expect an in-line secret as value are disabled",
                 ("recaptcha_public_key",),
             )
+        recaptcha_public_key_path = config.get("recaptcha_public_key_path")
+        if recaptcha_public_key_path:
+            if recaptcha_public_key:
+                raise ConfigError(CONFLICTING_RECAPTCHA_PUBLIC_KEY_OPTS_ERROR)
+            recaptcha_public_key = read_file(
+                recaptcha_public_key_path, ("recaptcha_public_key_path",)
+            ).strip()
         if recaptcha_public_key is not None and not isinstance(
             recaptcha_public_key, str
         ):

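Both hunks apply the same rule: a secret may be supplied inline or via a `*_path` file, never both, and file contents are `.strip()`ed. The `form_secret` hunk further down follows the identical shape. A generic sketch of the rule (the helper below is illustrative, not Synapse's `read_file`):

from typing import Optional

def resolve_secret(inline: Optional[str], path: Optional[str], name: str) -> Optional[str]:
    """Return the secret, sourced from exactly one of an inline value or a file."""
    if path:
        if inline:
            raise ValueError(f"`{name}` and `{name}_path` are mutually incompatible")
        with open(path) as f:
            # Strip the trailing newline most editors leave in secret files.
            return f.read().strip()
    return inline
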
@@ -42,6 +42,9 @@ class CasConfig(Config):
         self.cas_enabled = cas_config and cas_config.get("enabled", True)

         if self.cas_enabled:
+            if not isinstance(cas_config, dict):
+                raise ConfigError("Must be a dictionary", ("cas_config",))
+
             self.cas_server_url = cas_config["server_url"]

             # TODO Update this to a _synapse URL.

@@ -561,6 +561,12 @@ class ExperimentalConfig(Config):
         # MSC4076: Add `disable_badge_count` to pusher configuration
         self.msc4076_enabled: bool = experimental.get("msc4076_enabled", False)

+        # MSC4277: Harmonizing the reporting endpoints
+        #
+        # If enabled, ignore the score parameter and respond with HTTP 200 on
+        # reporting requests regardless of the subject's existence.
+        self.msc4277_enabled: bool = experimental.get("msc4277_enabled", False)
+
         # MSC4235: Add `via` param to hierarchy endpoint
         self.msc4235_enabled: bool = experimental.get("msc4235_enabled", False)

@@ -575,3 +581,10 @@ class ExperimentalConfig(Config):

         # MSC4155: Invite filtering
         self.msc4155_enabled: bool = experimental.get("msc4155_enabled", False)
+
+        # MSC4293: Redact on Kick/Ban
+        self.msc4293_enabled: bool = experimental.get("msc4293_enabled", False)
+
+        # MSC4306: Thread Subscriptions
+        # (and MSC4308: sliding sync extension for thread subscriptions)
+        self.msc4306_enabled: bool = experimental.get("msc4306_enabled", False)

@@ -212,11 +212,14 @@ class KeyConfig(Config):
                 "Config options that expect an in-line secret as value are disabled",
                 ("form_secret",),
             )
         if form_secret is not None and not isinstance(form_secret, str):
             raise ConfigError("Config option must be a string", ("form_secret",))

         form_secret_path = config.get("form_secret_path", None)
         if form_secret_path:
             if form_secret:
                 raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
-            self.form_secret = read_file(
+            self.form_secret: Optional[str] = read_file(
                 form_secret_path, ("form_secret_path",)
             ).strip()
         else:

@@ -241,6 +241,12 @@ class RatelimitConfig(Config):
             defaults={"per_second": 1, "burst_count": 5},
         )

+        self.rc_room_creation = RatelimitSettings.parse(
+            config,
+            "rc_room_creation",
+            defaults={"per_second": 0.016, "burst_count": 10},
+        )
+
         self.rc_reports = RatelimitSettings.parse(
             config,
             "rc_reports",

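Read as a token bucket, the `rc_room_creation` defaults allow a burst of 10 room creations, then refill at 0.016 tokens per second, i.e. roughly one room per minute sustained:

per_second, burst_count = 0.016, 10
print(1 / per_second)  # 62.5 -> one room creation permitted every ~62.5s once the burst is spent
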
@@ -22,11 +22,10 @@

 import logging
 import os
 from typing import Any, Dict, List, Tuple
-from urllib.request import getproxies_environment

 import attr

-from synapse.config.server import generate_ip_set
+from synapse.config.server import generate_ip_set, parse_proxy_config
 from synapse.types import JsonDict
 from synapse.util.check_dependencies import check_requirements
 from synapse.util.module_loader import load_module

@@ -61,7 +60,7 @@ THUMBNAIL_SUPPORTED_MEDIA_FORMAT_MAP = {
     "image/png": "png",
 }

-HTTP_PROXY_SET_WARNING = """\
+URL_PREVIEW_BLACKLIST_IGNORED_BECAUSE_HTTP_PROXY_SET_WARNING = """\
 The Synapse config url_preview_ip_range_blacklist will be ignored as an HTTP(s) proxy is configured."""

@@ -119,6 +118,15 @@ def parse_thumbnail_requirements(
     }


+@attr.s(auto_attribs=True, slots=True, frozen=True)
+class MediaUploadLimit:
+    """A limit on the amount of data a user can upload in a given time
+    period."""
+
+    max_bytes: int
+    time_period_ms: int
+
+
 class ContentRepositoryConfig(Config):
     section = "media"

@@ -225,17 +233,25 @@ class ContentRepositoryConfig(Config):
         if self.url_preview_enabled:
             check_requirements("url-preview")

-            proxy_env = getproxies_environment()
-            if "url_preview_ip_range_blacklist" not in config:
-                if "http" not in proxy_env or "https" not in proxy_env:
+            proxy_config = parse_proxy_config(config)
+            is_proxy_configured = (
+                proxy_config.http_proxy is not None
+                or proxy_config.https_proxy is not None
+            )
+            if "url_preview_ip_range_blacklist" in config:
+                if is_proxy_configured:
+                    logger.warning(
+                        "".join(
+                            URL_PREVIEW_BLACKLIST_IGNORED_BECAUSE_HTTP_PROXY_SET_WARNING
+                        )
+                    )
+            else:
+                if not is_proxy_configured:
                     raise ConfigError(
                         "For security, you must specify an explicit target IP address "
                         "blacklist in url_preview_ip_range_blacklist for url previewing "
                         "to work"
                     )
-            else:
-                if "http" in proxy_env or "https" in proxy_env:
-                    logger.warning("".join(HTTP_PROXY_SET_WARNING))

             # we always block '0.0.0.0' and '::', which are supposed to be
             # unroutable addresses.

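In effect the rewritten block is a four-way decision on (blacklist configured, proxy configured). A condensed sketch of the same logic, for reading convenience only (not the actual implementation):

def url_preview_outcome(has_blacklist: bool, has_proxy: bool) -> str:
    # Same branch structure as above, reduced to its outcomes.
    if has_blacklist:
        return "warn: blacklist ignored (proxy in use)" if has_proxy else "ok: blacklist enforced"
    return "ok: proxy shields outbound requests" if has_proxy else "error: blacklist required"
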
@@ -274,6 +290,13 @@ class ContentRepositoryConfig(Config):

         self.enable_authenticated_media = config.get("enable_authenticated_media", True)

+        self.media_upload_limits: List[MediaUploadLimit] = []
+        for limit_config in config.get("media_upload_limits", []):
+            time_period_ms = self.parse_duration(limit_config["time_period"])
+            max_bytes = self.parse_size(limit_config["max_size"])
+
+            self.media_upload_limits.append(MediaUploadLimit(max_bytes, time_period_ms))
+
     def generate_config_section(self, data_dir_path: str, **kwargs: Any) -> str:
         assert data_dir_path is not None
         media_store = os.path.join(data_dir_path, "media_store")

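Each `media_upload_limits` entry pairs a human-readable time window with a size budget, normalised to milliseconds and bytes by `parse_duration`/`parse_size`. A hedged sketch of an invented entry and the object it would become (the unit conversions shown, 1d = 86,400,000 ms and 100M = 100 * 1024**2 bytes, are assumptions for this sketch):

# Invented YAML, mirroring the keys read above:
#   media_upload_limits:
#     - time_period: 1d
#       max_size: 100M
#
# After parsing, assuming the unit conversions noted above:
limit = MediaUploadLimit(max_bytes=100 * 1024**2, time_period_ms=24 * 60 * 60 * 1000)
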
@@ -25,11 +25,13 @@ import logging
 import os.path
 import urllib.parse
 from textwrap import indent
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypedDict, Union
+from urllib.request import getproxies_environment

 import attr
 import yaml
 from netaddr import AddrFormatError, IPNetwork, IPSet
+from typing_extensions import TypeGuard

 from twisted.conch.ssh.keys import Key

@@ -43,6 +45,21 @@ from ._util import validate_config

 logger = logging.getLogger(__name__)


+# Directly from the mypy docs:
+# https://typing.python.org/en/latest/spec/narrowing.html#typeguard
+def is_str_list(val: Any, allow_empty: bool) -> TypeGuard[list[str]]:
+    """
+    Type-narrow a value to a list of strings (compatible with mypy).
+    """
+    if not isinstance(val, list):
+        return False
+
+    if len(val) == 0:
+        return allow_empty
+    return all(isinstance(x, str) for x in val)
+
+
 DIRECT_TCP_ERROR = """
 Using direct TCP replication for workers is no longer supported.

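The `TypeGuard` return type makes the runtime check double as a static narrowing hint: in the `True` branch, mypy treats the value as `list[str]`. A small usage sketch:

def shout(val: object) -> None:
    if is_str_list(val, allow_empty=False):
        # mypy narrows `val` to list[str] here, so str methods type-check.
        print([s.upper() for s in val])
    else:
        print("not a non-empty list of strings")

shout(["a", "b"])  # ['A', 'B']
shout([1, 2])      # not a non-empty list of strings
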
@@ -291,6 +308,102 @@ class LimitRemoteRoomsConfig:
     )


+class ProxyConfigDictionary(TypedDict):
+    """
+    Dictionary of proxy settings suitable for interacting with `urllib.request` APIs.
+    """
+
+    http: Optional[str]
+    """
+    Proxy server to use for HTTP requests.
+    """
+    https: Optional[str]
+    """
+    Proxy server to use for HTTPS requests.
+    """
+    no: str
+    """
+    Comma-separated list of hosts, IP addresses, or IP ranges in CIDR format which
+    should not use the proxy.
+
+    An empty string means no hosts are excluded from the proxy.
+    """
+
+
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class ProxyConfig:
+    """
+    Synapse configuration for HTTP proxy settings.
+    """
+
+    http_proxy: Optional[str]
+    """
+    Proxy server to use for HTTP requests.
+    """
+    https_proxy: Optional[str]
+    """
+    Proxy server to use for HTTPS requests.
+    """
+    no_proxy_hosts: Optional[List[str]]
+    """
+    List of hosts, IP addresses, or IP ranges in CIDR format which should not use the
+    proxy. Synapse will directly connect to these hosts.
+    """
+
+    def get_proxies_dictionary(self) -> ProxyConfigDictionary:
+        """
+        Returns a dictionary of proxy settings suitable for interacting with
+        `urllib.request` APIs (e.g. `urllib.request.proxy_bypass_environment`).
+
+        The keys are `"http"`, `"https"`, and `"no"`.
+        """
+        return ProxyConfigDictionary(
+            http=self.http_proxy,
+            https=self.https_proxy,
+            no=",".join(self.no_proxy_hosts) if self.no_proxy_hosts else "",
+        )
+
+
+def parse_proxy_config(config: JsonDict) -> ProxyConfig:
+    """
+    Figure out the forward proxy config for outgoing HTTP requests.
+
+    Prefer values from the given config over the environment variables
+    (`http_proxy`, `https_proxy`, `no_proxy`; not case-sensitive).
+
+    Args:
+        config: The top-level homeserver configuration dictionary.
+    """
+    proxies_from_env = getproxies_environment()
+    http_proxy = config.get("http_proxy", proxies_from_env.get("http"))
+    if http_proxy is not None and not isinstance(http_proxy, str):
+        raise ConfigError("'http_proxy' must be a string", ("http_proxy",))
+
+    https_proxy = config.get("https_proxy", proxies_from_env.get("https"))
+    if https_proxy is not None and not isinstance(https_proxy, str):
+        raise ConfigError("'https_proxy' must be a string", ("https_proxy",))
+
+    # List of hosts which should not use the proxy. Synapse will directly connect to
+    # these hosts.
+    no_proxy_hosts = config.get("no_proxy_hosts")
+    # The `no_proxy` environment variable should be a comma-separated list of hosts,
+    # IP addresses, or IP ranges in CIDR format.
+    no_proxy_from_env = proxies_from_env.get("no")
+    if no_proxy_hosts is None and no_proxy_from_env is not None:
+        no_proxy_hosts = no_proxy_from_env.split(",")
+
+    if no_proxy_hosts is not None and not is_str_list(no_proxy_hosts, allow_empty=True):
+        raise ConfigError(
+            "'no_proxy_hosts' must be a list of strings", ("no_proxy_hosts",)
+        )
+
+    return ProxyConfig(
+        http_proxy=http_proxy,
+        https_proxy=https_proxy,
+        no_proxy_hosts=no_proxy_hosts,
+    )
+
+
 class ServerConfig(Config):
     section = "server"

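Taken together, a plausible use of these helpers (a sketch: the config values are invented, and it assumes `parse_proxy_config` is importable once this lands, e.g. from `synapse.config.server`; `proxy_bypass_environment` is a real `urllib.request` helper that consults the `"no"` key of a proxies mapping):

from urllib.request import proxy_bypass_environment

# File values win over the environment, per the docstring above.
proxy_config = parse_proxy_config(
    {
        "http_proxy": "http://proxy.example.com:8080",
        "no_proxy_hosts": ["localhost", "10.0.0.0/8"],
    }
)

proxies = proxy_config.get_proxies_dictionary()
# The comma-separated "no" entry drives the bypass decision.
print(proxy_bypass_environment("localhost", proxies))  # truthy: bypass the proxy
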
@@ -718,6 +831,17 @@ class ServerConfig(Config):
             )
         )

+        # Figure out forward proxy config for outgoing HTTP requests.
+        #
+        # Prefer values from the file config over the environment variables
+        self.proxy_config = parse_proxy_config(config)
+        logger.debug(
+            "Using proxy settings: http_proxy=%s, https_proxy=%s, no_proxy=%s",
+            self.proxy_config.http_proxy,
+            self.proxy_config.https_proxy,
+            self.proxy_config.no_proxy_hosts,
+        )
+
         self.cleanup_extremities_with_dummy_events = config.get(
             "cleanup_extremities_with_dummy_events", True
         )

Some files were not shown because too many files have changed in this diff.