Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-11 01:40:27 +00:00)

Compare commits: v1.139.1...anoa/fix_d (27 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | dcd04493ad |  |
|  | 44ae428c88 |  |
|  | 6ff181dbc7 |  |
|  | fd8fa97b6a |  |
|  | 5266e423e2 |  |
|  | 0458f691b6 |  |
|  | 25fa555395 |  |
|  | 7708801d56 |  |
|  | d3fc638c29 |  |
|  | 6c292dc4ee |  |
|  | 120389b077 |  |
|  | 71b34b3a07 |  |
|  | e766f325af |  |
|  | 512b3f50cf |  |
|  | 0fbf296c99 |  |
|  | 0c8594c9a8 |  |
|  | 35c9cbb09d |  |
|  | 9680804496 |  |
|  | 8f63e2246a |  |
|  | aa83d660d5 |  |
|  | 641ced06a2 |  |
|  | 354f1cc219 |  |
|  | 478f593b6c |  |
|  | cd6c424adb |  |
|  | b70f668a8c |  |
|  | 0447496549 |  |
|  | 9ed0d36fe2 |  |
`.ci/scripts/triage_labelled_issue.sh` (new executable file, +29):

```bash
#!/usr/bin/env bash
set -euo pipefail

# 1) Resolve project ID.
PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')

# 2) Find existing item (project card) for this issue.
ITEM_ID=$(
    gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
        | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
)

# 3) If one doesn't exist, add this issue to the project.
if [ -z "${ITEM_ID:-}" ]; then
    ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
fi

# 4) Get Status field id + the option id for TARGET_STATUS.
FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')

if [ -z "${OPTION_ID:-}" ]; then
    echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
fi

# 5) Set Status (moves item to the matching column in the board view).
gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
```
`.github/workflows/fix_lint.yaml`: bump the pinned `rust-cache` action.

```diff
@@ -25,7 +25,7 @@ jobs:
         with:
           toolchain: ${{ env.RUST_VERSION }}
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
```
`.github/workflows/latest_deps.yml`: the same `rust-cache` bump, in three places.

```diff
@@ -47,7 +47,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
@@ -83,7 +83,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -158,7 +158,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
```
`.github/workflows/tests.yml`: the same `rust-cache` bump, in thirteen places.

```diff
@@ -91,7 +91,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"
@@ -157,7 +157,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -220,7 +220,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           poetry-version: "2.1.1"
@@ -240,7 +240,7 @@ jobs:
         with:
           components: clippy
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy -- -D warnings
 
@@ -259,7 +259,7 @@ jobs:
         with:
           toolchain: nightly-2025-04-23
           components: clippy
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy --all-features -- -D warnings
 
@@ -276,7 +276,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -315,7 +315,7 @@ jobs:
           # `.rustfmt.toml`.
           toolchain: nightly-2025-04-23
           components: rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo fmt --check
 
@@ -415,7 +415,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -459,7 +459,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -576,7 +576,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -722,7 +722,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
@@ -756,7 +756,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo test
 
@@ -776,7 +776,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo bench --no-run
```
`.github/workflows/triage_labelled.yml`: replace the `add-to-project` action and inline GraphQL mutation with the new triage script (added/removed markers reconstructed from the rendered compare view).

```diff
@@ -6,43 +6,26 @@ on:
 
 jobs:
   move_needs_info:
     name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
     permissions:
       contents: read
+    env:
+      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
+      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+      PROJECT_OWNER: matrix-org
+      # Backend issue triage board.
+      # https://github.com/orgs/matrix-org/projects/67/views/1
+      PROJECT_NUMBER: 67
+      ISSUE_URL: ${{ github.event.issue.html_url }}
+      # This field is case-sensitive.
+      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/add-to-project@4515659e2b458b27365e167605ac44f219494b66 # v1.0.2
-        id: add_project
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
-          project-url: "https://github.com/orgs/matrix-org/projects/67"
-          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        # This action will error if the issue already exists on the project. Which is
-        # common as `X-Needs-Info` will often be added to issues that are already in
-        # the triage queue. Prevent the whole job from failing in this case.
-        continue-on-error: true
-      - name: Set status
-        env:
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        run: |
-          gh api graphql -f query='
-            mutation(
-              $project: ID!
-              $item: ID!
-              $fieldid: ID!
-              $columnid: String!
-            ) {
-              updateProjectV2ItemFieldValue(
-                input: {
-                  projectId: $project
-                  itemId: $item
-                  fieldId: $fieldid
-                  value: {
-                    singleSelectOptionId: $columnid
-                  }
-                }
-              ) {
-                projectV2Item {
-                  id
-                }
-              }
-            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
+          # Only clone the script file we care about, instead of the whole repo.
+          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
+
+      - name: Ensure issue exists on the board, then set Status
+        run: .ci/scripts/triage_labelled_issue.sh
```
`.github/workflows/twisted_trunk.yml`: the same `rust-cache` bump, in three places.

```diff
@@ -49,7 +49,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -77,7 +77,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -123,7 +123,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
```
`CHANGES.md`: the 1.139.x sections are not on this branch, while the 1.138.2 and 1.138.1 sections are.

```diff
@@ -1,47 +1,8 @@
-# Synapse 1.139.1 (2025-10-07)
-
-## Security Fixes
-
-- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))
-
-## Deprecations and Removals
-
-- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))
-
-
-# Synapse 1.139.0 (2025-09-30)
-
-### `/register` requests from old application service implementations may break when using MAS
-
-If you are using Matrix Authentication Service (MAS), as of this release any
-Application Services that do not set `inhibit_login=true` when calling `POST
-/_matrix/client/v3/register` will receive the error
-`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see [the
-upgrade
-notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas)
-for more information.
-
-No significant changes since 1.139.0rc3.
-
-
-# Synapse 1.139.0rc3 (2025-09-25)
-
-## Bugfixes
-
-- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
-
-
 # Synapse 1.139.0rc2 (2025-09-23)
 
 ## Internal Changes
 
-- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. ([\#18962](https://github.com/element-hq/synapse/issues/18962))
+- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.139.0rc1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))
@@ -112,6 +73,23 @@ No significant changes since 1.139.0rc3.
 * Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895))
 * Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924))
 
+# Synapse 1.138.2 (2025-09-24)
+
+## Internal Changes
+
+- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.138.1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))
+
+
+# Synapse 1.138.1 (2025-09-24)
+
+## Bugfixes
+
+- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))
+
+
 # Synapse 1.138.0 (2025-09-09)
 
 No significant changes since 1.138.0rc1.
```
`Cargo.lock` (generated): dependency bumps.

```diff
@@ -28,9 +28,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.99"
+version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
 
 [[package]]
 name = "arc-swap"
@@ -1250,9 +1250,9 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.224"
+version = "1.0.226"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b"
+checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
 dependencies = [
  "serde_core",
  "serde_derive",
@@ -1260,18 +1260,18 @@ dependencies = [
 
 [[package]]
 name = "serde_core"
-version = "1.0.224"
+version = "1.0.226"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab"
+checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.224"
+version = "1.0.226"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0"
+checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
 dependencies = [
  "proc-macro2",
  "quote",
```
New changelog fragments under `changelog.d/`:

- `changelog.d/18721.bugfix`: Fix room upgrade `room_config` argument and documentation for `user_may_create_room` spam-checker callback.
- `changelog.d/18911.feature`: Add an Admin API that allows server admins to query and investigate the metadata of local or cached remote media via the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris).
- `changelog.d/18913.misc`: Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board.
- `changelog.d/18914.doc`: Explain how Deferred callbacks interact with logcontexts.
- `changelog.d/18939.misc`: Reduce overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load.
- `changelog.d/18947.misc`: Update error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper.
- `changelog.d/18948.bugfix`: Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`.
- `changelog.d/18959.misc`: Fix `no active span when trying to log` tracing error on startup (when OpenTracing is enabled).
- `changelog.d/18964.misc`: Fix `run_coroutine_in_background(...)` incorrectly handling logcontext.
- `changelog.d/18971.misc`: Update dockerfile metadata to fix broken link; point to documentation website.
`debian/changelog`: matching changes to the Debian packaging changelog.

```diff
@@ -1,21 +1,3 @@
-matrix-synapse-py3 (1.139.1) stable; urgency=medium
-
-  * New Synapse release 1.139.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
-
-matrix-synapse-py3 (1.139.0) stable; urgency=medium
-
-  * New Synapse release 1.139.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
-
-matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
-
 matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
 
   * New Synapse release 1.139.0rc2.
@@ -28,6 +10,18 @@ matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
 
  -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
 
+matrix-synapse-py3 (1.138.2) stable; urgency=medium
+
+  * New Synapse release 1.138.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 12:26:16 +0100
+
+matrix-synapse-py3 (1.138.1) stable; urgency=medium
+
+  * New Synapse release 1.138.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
+
 matrix-synapse-py3 (1.138.0) stable; urgency=medium
 
   * New Synapse release 1.138.0.
```
`docker/Dockerfile`: point the OCI image metadata at working URLs (see `changelog.d/18971.misc`).

```diff
@@ -171,8 +171,8 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
 
 ARG TARGETARCH
 
-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
+LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
 LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
 LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
```
`docs/admin_api/media_admin_api.md` (+34): adds the following section after the pointer to the [List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user) Admin API.

## Query a piece of media by ID

This API returns information about a piece of local or cached remote media, given the origin server name and media ID. If information is requested for remote media which is not cached, the endpoint will return 404.

Request:

```http
GET /_synapse/admin/v1/media/<origin>/<media_id>
```

The API returns a JSON body with media info like the following:

```json
{
  "media_info": {
    "media_origin": "remote.com",
    "user_id": null,
    "media_id": "sdginwegWEG",
    "media_type": "img/png",
    "media_length": 67,
    "upload_name": "test.png",
    "created_ts": 300,
    "filesystem_id": "wgeweg",
    "url_cache": null,
    "last_access_ts": 400,
    "quarantined_by": null,
    "authenticated": false,
    "safe_from_quarantine": null,
    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
  }
}
```
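For example, the new endpoint could be exercised from Python roughly like this; the homeserver URL, admin access token, and media identifiers below are placeholders, not values from the docs:

```python
import requests

SERVER = "https://synapse.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "syt_..."  # placeholder access token of a server admin

# Look up the metadata of a piece of cached remote media.
resp = requests.get(
    f"{SERVER}/_synapse/admin/v1/media/remote.com/sdginwegWEG",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()  # a 404 here means the remote media is not cached
print(resp.json()["media_info"]["media_type"])
```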
The existing "Quarantine media" section follows unchanged:

# Quarantine media

Quarantining media means that it is marked as inaccessible by users. It applies
`docs/log_contexts.md`: converts the "Always await your awaitables" heading to ATX style and documents how Deferred callbacks interact with logcontexts.

```diff
@@ -143,8 +143,7 @@ cares about.
 The following sections describe pitfalls and helpful patterns when
 implementing these rules.
 
-Always await your awaitables
-----------------------------
+## Always await your awaitables
 
 Whenever you get an awaitable back from a function, you should `await` on
 it as soon as possible. Do not pass go; do not do any logging; do not
```

The second hunk (`@@ -203,6 +202,171 @@ async def sleep(seconds):`) adds the following new material after the `sleep` example:

## Deferred callbacks

When a deferred callback is called, it inherits the current logcontext. The deferred
callback chain can resume a coroutine, which, if following our logcontext rules, will
restore its own logcontext, then run:

- until it yields control back to the reactor, setting the sentinel logcontext
- or until it finishes, restoring the logcontext it was started with (calling context)

This behavior creates two specific issues:

**Issue 1:** The first issue is that the callback may have reset the logcontext to the
sentinel before returning. This means our calling function will continue with the
sentinel logcontext instead of the logcontext it was started with (bad).

**Issue 2:** The second issue is that the current logcontext that called the deferred
callback could finish before the callback finishes (bad).

In the following example, the deferred callback is called with the "main" logcontext and
runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
sentinel before yielding control back to the reactor. Our `main` function continues with
the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
our `with LoggingContext("competing")` block exits, and restores the previous "main"
logcontext, which has already finished, resulting in `WARNING: Re-starting finished log
context main` and leaking the `main` logcontext into the reactor, which will then
erroneously be associated with the next task the reactor picks up.

```python
async def competing_callback():
    # Since this is run with the "main" logcontext, when the "competing"
    # logcontext exits, it will restore the previous "main" logcontext which has
    # already finished and results in "WARNING: Re-starting finished log context main"
    # and leaking the `main` logcontext into the reactor.
    with LoggingContext("competing"):
        await clock.sleep(0)


def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # Call the callback within the "main" logcontext.
        d.callback(None)
        # Bad: This will be logged against the sentinel logcontext
        logger.debug("ugh")


main()
```

**Solution 1:** We could of course fix this by following the general rule of "always
await your awaitables":

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        d.callback(None)
        # Wait for `d` to finish before continuing so the "main" logcontext is
        # still active. This works because `d` already follows our logcontext
        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
        await d
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
the callback, and restore the "main" logcontext afterwards before continuing the `main`
function. This solves the problem because when the "competing" logcontext exits, it will
restore the sentinel logcontext, which is never finished by its nature, so there is no
warning and no leakage into the reactor.

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        with PreserveLoggingContext():
            # Call the callback with the sentinel logcontext.
            d.callback(None)
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
callback in the current context without running into issues:

We can solve the first issue by using `run_in_background(...)` to run the callback in
the current logcontext; it handles the magic behind the scenes of a) restoring the
calling logcontext before returning to the caller and b) resetting the logcontext to the
sentinel after the deferred completes and we yield control back to the reactor, to avoid
leaking the logcontext into the reactor.

To solve the second issue, we can extend the lifetime of the "main" logcontext by
avoiding the `LoggingContext`'s context manager lifetime methods
(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
`PreserveLoggingContext` and passing in the "main" logcontext.

```python
async def main():
    main_context = LoggingContext("main")
    with PreserveLoggingContext(main_context):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # The whole lambda will be run in the "main" logcontext. But we're using
        # a trick to return the deferred `d` itself so that `run_in_background`
        # will wait on that to complete and reset the logcontext to the sentinel
        # when it does to avoid leaking the "main" logcontext into the reactor.
        run_in_background(lambda: (d.callback(None), d)[1])
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")

    ...

    # Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
    # point. This allows us to catch bugs if we later try to erroneously restart a finished
    # logcontext.
    #
    # Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
    # created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
    # previous logcontext. Our goal is to have the calling context remain unchanged after
    # finishing the "main" logcontext.
    with PreserveLoggingContext():
        # Finish the "main" logcontext
        with main_context:
            # Empty block - We're just trying to call `__exit__` on the "main" context
            # manager to finish it. We can't call `__exit__` directly as the code expects us
            # to `__enter__` before calling `__exit__` to `start`/`stop` things
            # appropriately. And in any case, it's probably best not to call the internal
            # methods directly.
            pass
```

The same thing applies if you have some deferreds stored somewhere which you want to
callback in the current logcontext.

### Deferred errbacks and cancellations

The same care should be taken when calling errbacks on deferreds. An errback and
a callback act the same in this regard (see the section above).

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.errback(failure)
```

Additionally, cancellation is the same as directly calling the errback with a
`twisted.internet.defer.CancelledError`:

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.cancel()
```
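If an errback chain can itself enter a competing logcontext, the same fixes apply. Below is a minimal sketch mirroring Solution 2 above; it reuses `some_other_function` and `logger` from the earlier examples and is not a snippet from the Synapse codebase:

```python
from twisted.internet import defer
from twisted.python.failure import Failure

from synapse.logging.context import LoggingContext, PreserveLoggingContext


def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        # The errback inherits whatever logcontext is current when the
        # deferred is fired, exactly like a callback would.
        d.addErrback(some_other_function)
        with PreserveLoggingContext():
            # Fire the errback chain with the sentinel logcontext, so that
            # nothing in the chain can later "restore" a finished "main"
            # logcontext and leak it into the reactor.
            d.errback(Failure(RuntimeError("boom")))
        # Good: this is logged against the "main" logcontext
        logger.debug("phew")
```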
The new section ends just before the existing heading:

## Fire-and-forget

Sometimes you want to fire off a chain of execution, but not wait for
`docs/modules/spam_checker_callbacks.md`: documents that `user_may_create_room` is also called for room upgrades. A usage sketch follows the diff.

````diff
@@ -195,12 +195,15 @@ _Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that o
 async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```
 
-Called when processing a room creation request.
+Called when processing a room creation or room upgrade request.
 
 The arguments passed to this callback are:
 
 * `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
+  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
+  subset of the state of the previous room).
 
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
````
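A sketch of how a module might use this callback to reject upgrades of specific rooms; the module class and the `BANNED_PREDECESSORS` deny-list are hypothetical, and only the callback signature and the `creation_content` shape come from the documentation above:

```python
from typing import Union

from synapse.module_api import NOT_SPAM, JsonDict, ModuleApi
from synapse.module_api.errors import Codes

# Hypothetical deny-list of rooms whose upgrades should be refused.
BANNED_PREDECESSORS = {"!abuse:example.com"}


class ExampleSpamChecker:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room,
        )

    async def user_may_create_room(
        self, user_id: str, room_config: JsonDict
    ) -> Union[str, Codes]:
        # For an upgrade, `creation_content` links to the previous room via
        # the `predecessor` field of the would-be m.room.create event.
        predecessor = room_config.get("creation_content", {}).get("predecessor", {})
        if predecessor.get("room_id") in BANNED_PREDECESSORS:
            return Codes.FORBIDDEN
        # NOT_SPAM allows the creation/upgrade (other callbacks may still veto it).
        return NOT_SPAM
```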
`docs/upgrade.md`: moves the Ubuntu support note from the v1.139.0 section to a new v1.138.2 section.

```diff
@@ -119,12 +119,6 @@ stacking them up. You can monitor the currently running background updates with
 
 # Upgrading to v1.139.0
 
-## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin
-
-Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
-2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
-24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.
-
 ## `/register` requests from old application service implementations may break when using MAS
 
 Application Services that do not set `inhibit_login=true` when calling `POST
@@ -140,6 +134,16 @@ ensure it is up to date. If it is, then kindly let the author know that they
 need to update their implementation to call `/register` with
 `inhibit_login=true`.
 
+# Upgrading to v1.138.2
+
+## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin
+
+Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
+2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
+24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.
+
+This notice also applies to the v1.139.0 release.
+
 # Upgrading to v1.136.0
 
 ## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
```
`poetry.lock` (generated): dependency bumps.

```diff
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a
 
 [[package]]
 name = "authlib"
-version = "1.6.3"
+version = "1.6.4"
 description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
 files = [
-    {file = "authlib-1.6.3-py2.py3-none-any.whl", hash = "sha256:7ea0f082edd95a03b7b72edac65ec7f8f68d703017d7e37573aee4fc603f2a48"},
-    {file = "authlib-1.6.3.tar.gz", hash = "sha256:9f7a982cc395de719e4c2215c5707e7ea690ecf84f1ab126f28c053f4219e610"},
+    {file = "authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796"},
+    {file = "authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649"},
 ]
 
 [package.dependencies]
@@ -1531,14 +1531,14 @@ files = [
 
 [[package]]
 name = "phonenumbers"
-version = "9.0.13"
+version = "9.0.14"
 description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
 optional = false
 python-versions = "*"
 groups = ["main"]
 files = [
-    {file = "phonenumbers-9.0.13-py2.py3-none-any.whl", hash = "sha256:b97661e177773e7509c6d503e0f537cd0af22aa3746231654590876eb9430915"},
-    {file = "phonenumbers-9.0.13.tar.gz", hash = "sha256:eca06e01382412c45316868f86a44bb217c02f9ee7196589041556a2f54a7639"},
+    {file = "phonenumbers-9.0.14-py2.py3-none-any.whl", hash = "sha256:6bdf5c46dbfefa1d941d122432d1958418d1dfe3f8c8c81d4c8e80f5442ea41f"},
+    {file = "phonenumbers-9.0.14.tar.gz", hash = "sha256:98afb3e86bf9ae02cc7c98ca44fa8827babb72842f90da9884c5d998937572ae"},
 ]
 
 [[package]]
@@ -1908,14 +1908,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
 
 [[package]]
 name = "pygithub"
-version = "2.7.0"
+version = "2.8.1"
 description = "Use the full Github API v3"
 optional = false
 python-versions = ">=3.8"
 groups = ["dev"]
 files = [
-    {file = "pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700"},
-    {file = "pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb"},
+    {file = "pygithub-2.8.1-py3-none-any.whl", hash = "sha256:23a0a5bca93baef082e03411bf0ce27204c32be8bfa7abc92fe4a3e132936df0"},
+    {file = "pygithub-2.8.1.tar.gz", hash = "sha256:341b7c78521cb07324ff670afd1baa2bf5c286f8d9fd302c1798ba594a5400c9"},
 ]
 
 [package.dependencies]
@@ -3011,14 +3011,14 @@ files = [
 
 [[package]]
 name = "types-requests"
-version = "2.32.4.20250809"
+version = "2.32.4.20250913"
 description = "Typing stubs for requests"
 optional = false
 python-versions = ">=3.9"
 groups = ["dev"]
 files = [
-    {file = "types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163"},
-    {file = "types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3"},
+    {file = "types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1"},
+    {file = "types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d"},
 ]
 
 [package.dependencies]
@@ -3038,14 +3038,14 @@ files = [
 
 [[package]]
 name = "typing-extensions"
-version = "4.14.1"
+version = "4.15.0"
 description = "Backported and Experimental Type Hints for Python 3.9+"
 optional = false
 python-versions = ">=3.9"
 groups = ["main", "dev"]
 files = [
-    {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
-    {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
+    {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
+    {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
 ]
 
 [[package]]
```
`pyproject.toml`:

```diff
@@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.139.1"
+version = "1.138.2"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "AGPL-3.0-or-later"
```
`synapse/api/auth/base.py`: use the MSC4326 error code when an appservice references a device that doesn't exist.

```diff
@@ -354,12 +354,10 @@ class BaseAuth:
             effective_user_id, effective_device_id
         )
         if device_opt is None:
-            # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
-            # This is an open thread of discussion on MSC3202 as of 2021-12-09.
             raise AuthError(
                 400,
                 f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
-                Codes.EXCLUSIVE,
+                Codes.UNKNOWN_DEVICE,
             )
 
         return create_requester(
```
`synapse/api/errors.py`: adds the MSC4326 error code.

```diff
@@ -149,6 +149,9 @@ class Codes(str, Enum):
     )
     MSC4306_NOT_IN_THREAD = "IO.ELEMENT.MSC4306.M_NOT_IN_THREAD"
 
+    # Part of MSC4326
+    UNKNOWN_DEVICE = "ORG.MATRIX.MSC4326.M_UNKNOWN_DEVICE"
+
 
 class CodeMessageException(RuntimeError):
     """An exception with integer code, a message string attributes and optional headers.
```
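Taken together with the `base.py` change above, the failure mode now surfaces to appservices roughly as follows; this is a sketch, not captured server output, and the device and user IDs are hypothetical:

```python
# A sketch of the JSON error body an appservice now receives (HTTP 400) when
# masquerading as a device that does not exist. The "error" message comes from
# the AuthError raised in BaseAuth above, with the offending IDs interpolated.
expected_error = {
    "errcode": "ORG.MATRIX.MSC4326.M_UNKNOWN_DEVICE",
    "error": (
        "Application service trying to use a device that doesn't exist "
        "('UNKNOWNDEVICE' for @bot:example.com)"
    ),
}
```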
`synapse/app/_base.py`: stop starting the tracer from the shared `start` helper (each app now starts it itself, as shown in the next two files).

```diff
@@ -73,7 +73,6 @@ from synapse.events.presence_router import load_legacy_presence_router
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.http.site import SynapseSite
 from synapse.logging.context import LoggingContext, PreserveLoggingContext
-from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -574,9 +573,6 @@ async def start(hs: "HomeServer") -> None:
     # Load the certificate from disk.
     refresh_certificate(hs)
 
-    # Start the tracer
-    init_tracer(hs)  # noqa
-
     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.
     module_api = hs.get_module_api()
```
`synapse/app/generic_worker.py`: start the tracer right after logging is set up, before `hs.setup()`.

```diff
@@ -49,6 +49,7 @@ from synapse.config.server import ListenerConfig, TCPListenerConfig
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.server import JsonResource, OptionsResource
 from synapse.logging.context import LoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
 from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.rest import ClientRestResource, admin
@@ -359,6 +360,9 @@ def start(config: HomeServerConfig) -> None:
 
     setup_logging(hs, config, use_worker_options=True)
 
+    # Start the tracer
+    init_tracer(hs)  # noqa
+
     try:
         hs.setup()
```
`synapse/app/homeserver.py`: likewise, start the tracer before `hs.setup()`, and tidy the `setup_logging` import.

```diff
@@ -29,7 +29,6 @@ from twisted.web.resource import EncodingResourceWrapper, Resource
 from twisted.web.server import GzipEncoderFactory
 
 import synapse
-import synapse.config.logger
 from synapse import events
 from synapse.api.urls import (
     CLIENT_API_PREFIX,
@@ -50,6 +49,7 @@ from synapse.app._base import (
 )
 from synapse.config._base import ConfigError, format_config_error
 from synapse.config.homeserver import HomeServerConfig
+from synapse.config.logger import setup_logging
 from synapse.config.server import ListenerConfig, TCPListenerConfig
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.additional_resource import AdditionalResource
@@ -60,6 +60,7 @@ from synapse.http.server import (
     StaticResource,
 )
 from synapse.logging.context import LoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
 from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.rest import ClientRestResource, admin
@@ -385,7 +386,10 @@ def setup(config: HomeServerConfig) -> SynapseHomeServer:
         version_string=f"Synapse/{VERSION}",
     )
 
-    synapse.config.logger.setup_logging(hs, config, use_worker_options=False)
+    setup_logging(hs, config, use_worker_options=False)
+
+    # Start the tracer
+    init_tracer(hs)  # noqa
 
     logger.info("Setting up server")
```
`synapse/handlers/e2e_keys.py`: restore acceptance of the unstable MSC2732 `org.matrix.msc2732.fallback_keys` field name and rework the key-upload handling. (The capture does not preserve added/removed markers for the first large hunk, so those lines are shown unmarked.)

```diff
@@ -57,6 +57,7 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
 
 ONE_TIME_KEY_UPLOAD = "one_time_key_upload_lock"
 
@@ -847,22 +848,14 @@ class E2eKeysHandler:
         """
         time_now = self.clock.time_msec()
 
         # TODO: Validate the JSON to make sure it has the right keys.
         device_keys = keys.get("device_keys", None)
         if device_keys:
             log_kv(
                 {
                     "message": "Updating device_keys for user.",
                     "user_id": user_id,
                     "device_id": device_id,
                 }
             )
             await self.upload_device_keys_for_user(
                 user_id=user_id,
                 device_id=device_id,
                 keys={"device_keys": device_keys},
             )
         else:
             log_kv({"message": "Did not update device_keys", "reason": "not a dict"})
 
         one_time_keys = keys.get("one_time_keys", None)
         if one_time_keys:
@@ -880,9 +873,10 @@ class E2eKeysHandler:
             log_kv(
                 {"message": "Did not update one_time_keys", "reason": "no keys given"}
             )
 
-        fallback_keys = keys.get("fallback_keys")
-        if fallback_keys:
+        fallback_keys = keys.get("fallback_keys") or keys.get(
+            "org.matrix.msc2732.fallback_keys"
+        )
+        if fallback_keys and isinstance(fallback_keys, dict):
             log_kv(
                 {
                     "message": "Updating fallback_keys for device.",
@@ -891,6 +885,8 @@ class E2eKeysHandler:
                 }
             )
             await self.store.set_e2e_fallback_keys(user_id, device_id, fallback_keys)
+        elif fallback_keys:
+            log_kv({"message": "Did not update fallback_keys", "reason": "not a dict"})
         else:
             log_kv(
                 {"message": "Did not update fallback_keys", "reason": "no keys given"}
```
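To make the restored behaviour concrete, a small sketch with hypothetical upload bodies showing that both field names now select a fallback key (key data elided):

```python
# After this change, both the stable and the unstable MSC2732 field name
# reach `set_e2e_fallback_keys`.
body_stable = {"fallback_keys": {"signed_curve25519:AAAAHg": "base64+key"}}
body_unstable = {
    "org.matrix.msc2732.fallback_keys": {"signed_curve25519:AAAAHg": "base64+key"}
}

for keys in (body_stable, body_unstable):
    fallback_keys = keys.get("fallback_keys") or keys.get(
        "org.matrix.msc2732.fallback_keys"
    )
    assert fallback_keys is not None and isinstance(fallback_keys, dict)
```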
`synapse/handlers/room.py`: when upgrading a room, pass the spam checker a properly shaped `initial_state` (a list of state-event dicts rather than raw `dict.items()` tuples).

```diff
@@ -597,7 +597,7 @@ class RoomCreationHandler:
             new_room_version,
             additional_creators=additional_creators,
         )
-        initial_state = {}
+        initial_state: MutableStateMap = {}
 
         # Replicate relevant room events
         types_to_copy: List[Tuple[str, Optional[str]]] = [
@@ -693,14 +693,23 @@ class RoomCreationHandler:
             additional_creators,
         )
 
-        # We construct what the body of a call to /createRoom would look like for passing
-        # to the spam checker. We don't include a preset here, as we expect the
+        # We construct a subset of what the body of a call to /createRoom would look like
+        # for passing to the spam checker. We don't include a preset here, as we expect the
         # initial state to contain everything we need.
-        # TODO: given we are upgrading, it would make sense to pass the room_version
+        # TODO: the preset might be useful too
         spam_check = await self._spam_checker_module_callbacks.user_may_create_room(
             user_id,
             {
                 "creation_content": creation_content,
-                "initial_state": list(initial_state.items()),
+                "initial_state": [
+                    {
+                        "type": state_key[0],
+                        "state_key": state_key[1],
+                        "content": event_content,
+                    }
+                    for state_key, event_content in initial_state.items()
+                ],
             },
         )
         if spam_check != self._spam_checker_module_callbacks.NOT_SPAM:
```
`synapse/media/media_repository.py`: adds a helper that looks up cached remote media without ever hitting federation.

```diff
@@ -423,6 +423,23 @@ class MediaRepository:
             send_cors=True,
         )
 
+    async def get_cached_remote_media_info(
+        self, origin: str, media_id: str
+    ) -> Optional[RemoteMedia]:
+        """
+        Get cached remote media info for a given origin/media ID combo. If the requested
+        media is not found locally, it will not be requested over federation and the
+        call will return None.
+
+        Args:
+            origin: The origin of the remote media
+            media_id: The media ID of the requested content
+
+        Returns:
+            The info for the cached remote media or None if it was not found
+        """
+        return await self.store.get_cached_remote_media(origin, media_id)
+
     async def get_local_media_info(
         self, request: SynapseRequest, media_id: str, max_timeout_ms: int
     ) -> Optional[LocalMedia]:
```
`synapse/rest/admin/media.py`: adds the `QueryMediaById` servlet backing the new Admin API.

```diff
@@ -18,7 +18,6 @@
 # [This file includes modifications made by New Vector Limited]
 #
-#
 
 import logging
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Optional, Tuple
@@ -41,7 +40,9 @@ from synapse.rest.admin._base import (
     assert_requester_is_admin,
     assert_user_is_admin,
 )
-from synapse.storage.databases.main.media_repository import MediaSortOrder
+from synapse.storage.databases.main.media_repository import (
+    MediaSortOrder,
+)
 from synapse.types import JsonDict, UserID
 
 if TYPE_CHECKING:
@@ -50,6 +51,72 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+class QueryMediaById(RestServlet):
+    """
+    Fetch info about a piece of local or cached remote media.
+    """
+
+    PATTERNS = admin_patterns("/media/(?P<server_name>[^/]*)/(?P<media_id>[^/]*)$")
+
+    def __init__(self, hs: "HomeServer"):
+        self.store = hs.get_datastores().main
+        self.auth = hs.get_auth()
+        self.server_name = hs.hostname
+        self.hs = hs
+        self.media_repo = hs.get_media_repository()
+
+    async def on_GET(
+        self, request: SynapseRequest, server_name: str, media_id: str
+    ) -> Tuple[int, JsonDict]:
+        requester = await self.auth.get_user_by_req(request)
+        await assert_user_is_admin(self.auth, requester)
+
+        if not self.hs.is_mine_server_name(server_name):
+            remote_media_info = await self.media_repo.get_cached_remote_media_info(
+                server_name, media_id
+            )
+            if remote_media_info is None:
+                raise NotFoundError("Unknown media")
+            resp = {
+                "media_origin": remote_media_info.media_origin,
+                "user_id": None,
+                "media_id": remote_media_info.media_id,
+                "media_type": remote_media_info.media_type,
+                "media_length": remote_media_info.media_length,
+                "upload_name": remote_media_info.upload_name,
+                "created_ts": remote_media_info.created_ts,
+                "filesystem_id": remote_media_info.filesystem_id,
+                "url_cache": None,
+                "last_access_ts": remote_media_info.last_access_ts,
+                "quarantined_by": remote_media_info.quarantined_by,
+                "authenticated": remote_media_info.authenticated,
+                "safe_from_quarantine": None,
+                "sha256": remote_media_info.sha256,
+            }
+        else:
+            local_media_info = await self.store.get_local_media(media_id)
+            if local_media_info is None:
+                raise NotFoundError("Unknown media")
+            resp = {
+                "media_origin": None,
+                "user_id": local_media_info.user_id,
+                "media_id": local_media_info.media_id,
+                "media_type": local_media_info.media_type,
+                "media_length": local_media_info.media_length,
+                "upload_name": local_media_info.upload_name,
+                "created_ts": local_media_info.created_ts,
+                "filesystem_id": None,
+                "url_cache": local_media_info.url_cache,
+                "last_access_ts": local_media_info.last_access_ts,
+                "quarantined_by": local_media_info.quarantined_by,
+                "authenticated": local_media_info.authenticated,
+                "safe_from_quarantine": local_media_info.safe_from_quarantine,
+                "sha256": local_media_info.sha256,
+            }
+
+        return HTTPStatus.OK, {"media_info": resp}
+
+
 class QuarantineMediaInRoom(RestServlet):
     """Quarantines all media in a room so that no one can download it via
     this server.
@@ -470,3 +537,4 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer)
     DeleteMediaByDateSize(hs).register(http_server)
     DeleteMediaByID(hs).register(http_server)
     UserMediaRestServlet(hs).register(http_server)
+    QueryMediaById(hs).register(http_server)
```
@@ -23,19 +23,10 @@
|
||||
import logging
|
||||
import re
|
||||
from collections import Counter
|
||||
from http import HTTPStatus
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union
|
||||
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
|
||||
|
||||
from typing_extensions import Self
|
||||
|
||||
from synapse._pydantic_compat import (
|
||||
StrictBool,
|
||||
StrictStr,
|
||||
validator,
|
||||
)
|
||||
from synapse.api.auth.mas import MasDelegatedAuth
|
||||
from synapse.api.errors import (
|
||||
Codes,
|
||||
InteractiveAuthIncompleteError,
|
||||
InvalidAPICallError,
|
||||
SynapseError,
|
||||
@@ -46,13 +37,11 @@ from synapse.http.servlet import (
|
||||
parse_integer,
|
||||
parse_json_object_from_request,
|
||||
parse_string,
|
||||
validate_json_object,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging.opentracing import log_kv, set_tag
|
||||
from synapse.rest.client._base import client_patterns, interactive_auth_handler
|
||||
from synapse.types import JsonDict, StreamToken
|
||||
from synapse.types.rest import RequestBodyModel
|
||||
from synapse.util.cancellation import cancellable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -70,6 +59,7 @@ class KeyUploadServlet(RestServlet):
|
||||
"device_keys": {
|
||||
"user_id": "<user_id>",
|
||||
"device_id": "<device_id>",
|
||||
"valid_until_ts": <millisecond_timestamp>,
|
||||
"algorithms": [
|
||||
"m.olm.curve25519-aes-sha2",
|
||||
]
|
||||
@@ -121,123 +111,12 @@ class KeyUploadServlet(RestServlet):
        self._clock = hs.get_clock()
        self._store = hs.get_datastores().main

    class KeyUploadRequestBody(RequestBodyModel):
        """
        The body of a `POST /_matrix/client/v3/keys/upload` request.

        Based on https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload.
        """

        class DeviceKeys(RequestBodyModel):
            algorithms: List[StrictStr]
            """The encryption algorithms supported by this device."""

            device_id: StrictStr
            """The ID of the device these keys belong to. Must match the device ID used when logging in."""

            keys: Mapping[StrictStr, StrictStr]
            """
            Public identity keys. The names of the properties should be in the
            format `<algorithm>:<device_id>`. The keys themselves should be encoded as
            specified by the key algorithm.
            """

            signatures: Mapping[StrictStr, Mapping[StrictStr, StrictStr]]
            """Signatures for the device key object. A map from user ID to a map from `<algorithm>:<device_id>` to the signature."""

            user_id: StrictStr
            """The ID of the user the device belongs to. Must match the user ID used when logging in."""

        class KeyObject(RequestBodyModel):
            key: StrictStr
            """The key, encoded using unpadded base64."""

            fallback: Optional[StrictBool] = False
            """Whether this is a fallback key. Only used when handling fallback keys."""

            signatures: Mapping[StrictStr, Mapping[StrictStr, StrictStr]]
            """Signatures for the key object. A map from user ID to a map from key signing identifier to the signature itself.

            See the following for more detail: https://spec.matrix.org/v1.16/appendices/#signing-details
            """

        device_keys: Optional[DeviceKeys] = None
        """Identity keys for the device. May be absent if no new identity keys are required."""

        fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]]
        """
        The public key which should be used if the device's one-time keys are
        exhausted. The fallback key is not deleted once used, but should be
        replaced when additional one-time keys are being uploaded. The server
        will notify the client of the fallback key being used through `/sync`.

        At most one key per algorithm may be uploaded, and the server will
        only persist one key per algorithm.

        When uploading a signed key, an additional `"fallback": true` property
        should be included to denote that the key is a fallback key.

        May be absent if a new fallback key is not required.
        """

        @validator("fallback_keys", pre=True)
        def validate_fallback_keys(cls: Self, v: Any) -> Any:
            if v is None:
                return v
            if not isinstance(v, dict):
                raise TypeError("fallback_keys must be a mapping")

            for k in v.keys():
                if len(k.split(":")) != 2:
                    raise SynapseError(
                        code=HTTPStatus.BAD_REQUEST,
                        errcode=Codes.BAD_JSON,
                        msg=f"Invalid fallback_keys key {k!r}. "
                        'Expected "<algorithm>:<device_id>".',
                    )
            return v

        one_time_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] = None
        """
        One-time public keys for "pre-key" messages. The names of the properties
        should be in the format `<algorithm>:<key_id>`.

        The format of the key is determined by the key algorithm, see:
        https://spec.matrix.org/v1.16/client-server-api/#key-algorithms.
        """

        @validator("one_time_keys", pre=True)
        def validate_one_time_keys(cls: Self, v: Any) -> Any:
            if v is None:
                return v
            if not isinstance(v, dict):
                raise TypeError("one_time_keys must be a mapping")

            for k, _ in v.items():
                if len(k.split(":")) != 2:
                    raise SynapseError(
                        code=HTTPStatus.BAD_REQUEST,
                        errcode=Codes.BAD_JSON,
                        msg=f"Invalid one_time_keys key {k!r}. "
                        'Expected "<algorithm>:<key_id>".',
                    )
            return v

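For illustration, a request body that passes this model's validation might look like the following sketch (all user IDs, device IDs, and key material are hypothetical):

# Hypothetical POST /_matrix/client/v3/keys/upload body satisfying
# KeyUploadRequestBody: property names in fallback_keys/one_time_keys must
# contain exactly one colon, and values may be a plain string or a key object.
body = {
    "device_keys": {
        "user_id": "@alice:example.org",
        "device_id": "JLAFKJWSCS",
        "algorithms": ["m.olm.curve25519-aes-sha2"],
        "keys": {"curve25519:JLAFKJWSCS": "base64identitykey"},
        "signatures": {"@alice:example.org": {"ed25519:JLAFKJWSCS": "base64sig"}},
    },
    "fallback_keys": {
        "signed_curve25519:AAAAHg": {
            "key": "base64fallbackkey",
            "fallback": True,
            "signatures": {"@alice:example.org": {"ed25519:JLAFKJWSCS": "base64sig"}},
        }
    },
    "one_time_keys": {
        "signed_curve25519:AAAAHQ": {
            "key": "base64onetimekey",
            "signatures": {"@alice:example.org": {"ed25519:JLAFKJWSCS": "base64sig"}},
        }
    },
}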
    async def on_POST(
        self, request: SynapseRequest, device_id: Optional[str]
    ) -> Tuple[int, JsonDict]:
        requester = await self.auth.get_user_by_req(request, allow_guest=True)
        user_id = requester.user.to_string()

        # Parse the request body. Validate separately, as the handler expects a
        # plain dict, rather than any parsed object.
        #
        # Note: It would be nice to work with a parsed object, but the handler
        # needs to encode portions of the request body as canonical JSON before
        # storing the result in the DB. There's little point in converting to a
        # parsed object and then back to a dict.
        body = parse_json_object_from_request(request)
        validate_json_object(body, self.KeyUploadRequestBody)

        if device_id is not None:
            # Providing the device_id should only be done for setting keys
@@ -270,31 +149,8 @@ class KeyUploadServlet(RestServlet):
                400, "To upload keys, you must pass device_id when authenticating"
            )

        if "device_keys" in body:
            # Validate that the provided `user_id` and `device_id` fields in
            # `device_keys` match those of the requesting user. We can't do
            # this directly in the pydantic model as we don't have access
            # to the requester yet.
            #
            # TODO: We could use ValidationInfo when we switch to Pydantic v2.
            # https://docs.pydantic.dev/latest/concepts/validators/#validation-info
            if body["device_keys"]["user_id"] != user_id:
                raise SynapseError(
                    code=HTTPStatus.BAD_REQUEST,
                    errcode=Codes.BAD_JSON,
                    msg="Provided `user_id` in `device_keys` does not match that of the authenticated user",
                )
            if body["device_keys"]["device_id"] != device_id:
                raise SynapseError(
                    code=HTTPStatus.BAD_REQUEST,
                    errcode=Codes.BAD_JSON,
                    msg="Provided `device_id` in `device_keys` does not match that of the authenticated user's device",
                )

        result = await self.e2e_keys_handler.upload_keys_for_user(
            user_id=user_id,
            device_id=device_id,
            keys=body,
            user_id=user_id, device_id=device_id, keys=body
        )

        return 200, result

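If either check fails, the client receives a 400 with errcode M_BAD_JSON (the wire value of Codes.BAD_JSON). A sketch of the response body a client would see:

# Hypothetical error response when `device_keys.user_id` does not match the
# authenticated user.
error_response = {
    "errcode": "M_BAD_JSON",
    "error": "Provided `user_id` in `device_keys` does not match that of the authenticated user",
}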
@@ -363,6 +363,9 @@ class SyncRestServlet(RestServlet):

        # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md
        # states that this field should always be included, as long as the server supports the feature.
        response["org.matrix.msc2732.device_unused_fallback_key_types"] = (
            sync_result.device_unused_fallback_key_types
        )
        response["device_unused_fallback_key_types"] = (
            sync_result.device_unused_fallback_key_types
        )

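For context, a sync response then carries the same list under both the stable field name and the MSC2732 unstable prefix; a sketch with a hypothetical value:

# Sketch of the relevant portion of a /sync response.
sync_response_fragment = {
    "org.matrix.msc2732.device_unused_fallback_key_types": ["signed_curve25519"],
    "device_unused_fallback_key_types": ["signed_curve25519"],
}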
@@ -299,10 +299,14 @@ class DataStore(
            FROM users as u
            LEFT JOIN profiles AS p ON u.name = p.full_user_id
            LEFT JOIN erased_users AS eu ON u.name = eu.user_id
            LEFT JOIN (
                SELECT user_id, MAX(last_seen) AS last_seen_ts
                FROM devices GROUP BY user_id
            ) lsd ON u.name = lsd.user_id
            LEFT JOIN (
                SELECT user_id, MAX(last_seen) AS last_seen_ts
                FROM user_ips GROUP BY user_id
            ) ls ON u.name = ls.user_id
            ) lsi ON u.name = lsi.user_id
            {where_clause}
        """
        sql = "SELECT COUNT(*) as total_users " + sql_base
@@ -312,7 +316,8 @@ class DataStore(
        sql = f"""
            SELECT name, user_type, is_guest, admin, deactivated, shadow_banned,
            displayname, avatar_url, creation_ts * 1000 as creation_ts, approved,
            eu.user_id is not null as erased, last_seen_ts, locked
            eu.user_id is not null as erased,
            COALESCE(lsd.last_seen_ts, lsi.last_seen_ts) as last_seen_ts, locked
            {sql_base}
            ORDER BY {order_by_column} {order}, u.name ASC
            LIMIT ? OFFSET ?

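The point of joining both subqueries and then COALESCE-ing is that the devices-derived timestamp is used when present, with the user_ips one as a fallback. A minimal, runnable sqlite sketch of that semantics (table and column names are illustrative, not Synapse's schema):

import sqlite3

# COALESCE returns the first non-NULL argument: here lsd (devices) has no
# row for the user, so the lsi (user_ips) value is used instead.
con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE lsd (user_id TEXT, last_seen_ts INTEGER)")
con.execute("CREATE TABLE lsi (user_id TEXT, last_seen_ts INTEGER)")
con.execute("INSERT INTO lsi VALUES ('@alice:test', 1000)")
row = con.execute(
    """
    SELECT COALESCE(lsd.last_seen_ts, lsi.last_seen_ts)
    FROM lsi LEFT JOIN lsd ON lsd.user_id = lsi.user_id
    """
).fetchone()
print(row)  # (1000,) -- falls back to the user_ips timestamp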
@@ -354,7 +354,10 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
            if d is not None and d.keys is not None
        )

        for batch in batch_iter(signature_query, 50):
        # 1000 is an arbitrary batch size. It helped performance on a very
        # large-scale deployment (matrix.org), but has not been tested against
        # any other setup.
        for batch in batch_iter(signature_query, 1000):
            cross_sigs_result = (
                await self._get_e2e_cross_signing_signatures_for_devices(batch)
            )

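`batch_iter` here is Synapse's iteration helper; for readers unfamiliar with it, a minimal equivalent sketch of its behaviour (chunking an iterable into fixed-size tuples):

from itertools import islice
from typing import Iterable, Iterator, Tuple, TypeVar

T = TypeVar("T")

def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
    # Yield successive tuples of at most `size` items from `iterable`.
    it = iter(iterable)
    while batch := tuple(islice(it, size)):
        yield batch

assert list(batch_iter(range(5), 2)) == [(0, 1), (2, 3), (4,)]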
@@ -304,7 +304,7 @@ class AuthTestCase(unittest.HomeserverTestCase):

        failure = self.get_failure(self.auth.get_user_by_req(request), AuthError)
        self.assertEqual(failure.value.code, 400)
        self.assertEqual(failure.value.errcode, Codes.EXCLUSIVE)
        self.assertEqual(failure.value.errcode, Codes.UNKNOWN_DEVICE)

    def test_get_user_by_req__puppeted_token__not_tracking_puppeted_mau(self) -> None:
        self.store.get_user_by_access_token = AsyncMock(

@@ -410,6 +410,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
        device_id = "xyz"
        fallback_key = {"alg1:k1": "fallback_key1"}
        fallback_key2 = {"alg1:k2": "fallback_key2"}
        fallback_key3 = {"alg1:k2": "fallback_key3"}
        otk = {"alg1:k2": "key2"}

        # we shouldn't have any unused fallback keys yet
@@ -530,6 +531,28 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key2}}},
        )

        # using the unstable prefix should also set the fallback key
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"org.matrix.msc2732.fallback_keys": fallback_key3},
            )
        )

        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key3}}},
        )

    def test_fallback_key_bulk(self) -> None:
        """Like test_fallback_key, but claims multiple keys in one handler call."""
        alice = f"@alice:{self.hs.hostname}"

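Side by side, the stable and unstable upload properties exercised above (key IDs reused from the test):

# Both spellings set the device's fallback key; the MSC2732 unstable name is
# still accepted for older clients.
stable_body = {"fallback_keys": {"alg1:k2": "fallback_key3"}}
unstable_body = {"org.matrix.msc2732.fallback_keys": {"alg1:k2": "fallback_key3"}}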
@@ -16,6 +16,7 @@ from typing import Literal, Union

from twisted.internet.testing import MemoryReactor

from synapse.api.constants import EventContentFields, EventTypes
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.rest import admin, login, room, room_upgrade_rest_servlet
from synapse.server import HomeServer
@@ -51,8 +52,8 @@ class SpamCheckerTestCase(HomeserverTestCase):

        return channel

    def test_may_user_create_room(self) -> None:
        """Test that the may_user_create_room callback is called when a user
    def test_user_may_create_room(self) -> None:
        """Test that the user_may_create_room callback is called when a user
        creates a room, and that it receives the correct parameters.
        """

@@ -67,16 +68,50 @@ class SpamCheckerTestCase(HomeserverTestCase):
            user_may_create_room=user_may_create_room
        )

        channel = self.create_room({"foo": "baa"})
        expected_room_config = {"foo": "baa"}
        channel = self.create_room(expected_room_config)

        self.assertEqual(channel.code, 200)
        self.assertEqual(self.last_user_id, self.user_id)
        self.assertEqual(self.last_room_config["foo"], "baa")
        self.assertEqual(self.last_room_config, expected_room_config)

    def test_may_user_create_room_on_upgrade(self) -> None:
        """Test that the may_user_create_room callback is called when a room is upgraded."""
    def test_user_may_create_room_with_initial_state(self) -> None:
        """Test that the user_may_create_room callback is called when a user
        creates a room with some initial state events, and that it receives the correct parameters.
        """

        async def user_may_create_room(
            user_id: str, room_config: JsonDict
        ) -> Union[Literal["NOT_SPAM"], Codes]:
            self.last_room_config = room_config
            self.last_user_id = user_id
            return "NOT_SPAM"

        self._module_api.register_spam_checker_callbacks(
            user_may_create_room=user_may_create_room
        )

        expected_room_config = {
            "foo": "baa",
            "initial_state": [
                {
                    "type": EventTypes.Topic,
                    "content": {EventContentFields.TOPIC: "foo"},
                }
            ],
        }
        channel = self.create_room(expected_room_config)

        self.assertEqual(channel.code, 200)
        self.assertEqual(self.last_user_id, self.user_id)
        self.assertEqual(self.last_room_config, expected_room_config)

    def test_user_may_create_room_on_upgrade(self) -> None:
        """Test that the user_may_create_room callback is called when a room is upgraded."""

        # First, create a room to upgrade.
        channel = self.create_room({"topic": "foo"})
        channel = self.create_room({EventContentFields.TOPIC: "foo"})

        self.assertEqual(channel.code, 200)
        room_id = channel.json_body["room_id"]

@@ -107,13 +142,15 @@ class SpamCheckerTestCase(HomeserverTestCase):
        # Check that the initial state received by the callback contains the topic event.
        self.assertTrue(
            any(
                event[0][0] == "m.room.topic" and event[1].get("topic") == "foo"
                event.get("type") == EventTypes.Topic
                and event.get("state_key") == ""
                and event.get("content").get(EventContentFields.TOPIC) == "foo"
                for event in self.last_room_config["initial_state"]
            )
        )

    def test_may_user_create_room_disallowed(self) -> None:
        """Test that the codes response from may_user_create_room callback is respected
    def test_user_may_create_room_disallowed(self) -> None:
        """Test that the Codes response from the user_may_create_room callback is respected
        and returned via the API.
        """

@@ -128,14 +165,16 @@ class SpamCheckerTestCase(HomeserverTestCase):
            user_may_create_room=user_may_create_room
        )

        channel = self.create_room({"foo": "baa"})
        expected_room_config = {"foo": "baa"}
        channel = self.create_room(expected_room_config)

        self.assertEqual(channel.code, 403)
        self.assertEqual(channel.json_body["errcode"], Codes.UNAUTHORIZED)
        self.assertEqual(self.last_user_id, self.user_id)
        self.assertEqual(self.last_room_config["foo"], "baa")
        self.assertEqual(self.last_room_config, expected_room_config)

    def test_may_user_create_room_compatibility(self) -> None:
        """Test that the may_user_create_room callback is called when a user
    def test_user_may_create_room_compatibility(self) -> None:
        """Test that the user_may_create_room callback is called when a user
        creates a room for a module that uses the old callback signature
        (without the `room_config` parameter)
        """
@@ -151,6 +190,7 @@ class SpamCheckerTestCase(HomeserverTestCase):
        )

        channel = self.create_room({"foo": "baa"})

        self.assertEqual(channel.code, 200)
        self.assertEqual(self.last_user_id, self.user_id)

@@ -178,6 +218,7 @@ class SpamCheckerTestCase(HomeserverTestCase):
        )

        channel = self.create_room({})

        self.assertEqual(channel.code, 200)

        room_id = channel.json_body["room_id"]
@@ -222,6 +263,7 @@ class SpamCheckerTestCase(HomeserverTestCase):
        )

        channel = self.create_room({})

        self.assertEqual(channel.code, 200)

        room_id = channel.json_body["room_id"]

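For reference, a standalone module would register the same callback through the module API. A minimal sketch (the module class and its config handling are hypothetical; the callback name and signature follow the tests above):

from typing import Literal, Union

from synapse.api.errors import Codes
from synapse.module_api import ModuleApi
from synapse.types import JsonDict


class ExampleSpamChecker:
    # Hypothetical module wiring up user_may_create_room.
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room
        )

    async def user_may_create_room(
        self, user_id: str, room_config: JsonDict
    ) -> Union[Literal["NOT_SPAM"], Codes]:
        # Allow everything; return a Codes value (e.g. Codes.FORBIDDEN) to reject.
        return "NOT_SPAM"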
@@ -29,8 +29,9 @@ from twisted.web.resource import Resource

import synapse.rest.admin
from synapse.api.errors import Codes
from synapse.media._base import FileInfo
from synapse.media.filepath import MediaFilePaths
from synapse.rest.client import login, profile, room
from synapse.rest.client import login, media, profile, room
from synapse.server import HomeServer
from synapse.util.clock import Clock

@@ -47,6 +48,7 @@ class _AdminMediaTests(unittest.HomeserverTestCase):
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
        media.register_servlets,
    ]

    def create_resource_dict(self) -> Dict[str, Resource]:
@@ -55,6 +57,164 @@ class _AdminMediaTests(unittest.HomeserverTestCase):
        return resources


class QueryMediaByIDTestCase(_AdminMediaTests):
    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.hs = hs
        self.clock = clock
        self.server_name = hs.hostname
        self.store = hs.get_datastores().main

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

    def _cache_remote_media(self, file_id: str) -> None:
        file_info = FileInfo(server_name="remote.com", file_id=file_id)

        media_storage = self.hs.get_media_repository().media_storage

        ctx = media_storage.store_into_file(file_info)
        (f, fname) = self.get_success(ctx.__aenter__())
        f.write(SMALL_PNG)
        self.get_success(ctx.__aexit__(None, None, None))

        self.get_success(
            self.store.store_cached_remote_media(
                origin="remote.com",
                media_id=file_id,
                media_type="image/png",
                media_length=len(SMALL_PNG),
                time_now_ms=self.clock.time_msec(),
                upload_name="test.png",
                filesystem_id=file_id,
                sha256=file_id,
            )
        )

        channel = self.make_request(
            "GET",
            f"/_matrix/client/v1/media/download/remote.com/{file_id}",
            shorthand=False,
            access_token=self.admin_user_tok,
        )

        # Should be successful
        self.assertEqual(
            200,
            channel.code,
            msg="Expected to receive a 200 on accessing media",
        )

    def test_no_auth(self) -> None:
        """
        Try to query media without authentication.
        """
        url = f"/_synapse/admin/v1/media/{self.server_name}/12345"
        channel = self.make_request("GET", url)

        self.assertEqual(
            401,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/media/{self.server_name}/12345",
            access_token=self.other_user_token,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_local_media_does_not_exist(self) -> None:
        """
        Tests that a lookup for local media that does not exist returns a 404.
        """
        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/media/{self.server_name}/12345",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(404, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_remote_media_does_not_exist(self) -> None:
        """
        Tests that a lookup for remote media that is not cached returns a 404.
        """
        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/media/{self.server_name}/12345",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(404, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_query_local_media(self) -> None:
        """
        Tests that querying existing local media returns the appropriate media info.
        """

        # Upload some media
        response = self.helper.upload_media(
            SMALL_PNG,
            tok=self.admin_user_tok,
            expect_code=200,
        )
        # Extract the media ID from the response
        server_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        server_name, media_id = server_and_media_id.split("/")
        self.assertEqual(server_name, self.server_name)

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/media/{self.server_name}/{media_id}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["media_info"]["authenticated"], True)
        self.assertEqual(channel.json_body["media_info"]["media_id"], media_id)
        self.assertEqual(
            channel.json_body["media_info"]["media_length"], len(SMALL_PNG)
        )
        self.assertEqual(
            channel.json_body["media_info"]["media_type"], "application/json"
        )
        self.assertEqual(channel.json_body["media_info"]["upload_name"], "test.png")
        self.assertEqual(channel.json_body["media_info"]["user_id"], "@admin:test")

    def test_query_remote_media(self) -> None:
        file_id = "abcdefg12345"
        self._cache_remote_media(file_id)

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/media/remote.com/{file_id}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["media_info"]["authenticated"], True)
        self.assertEqual(channel.json_body["media_info"]["media_id"], file_id)
        self.assertEqual(
            channel.json_body["media_info"]["media_length"], len(SMALL_PNG)
        )
        self.assertEqual(channel.json_body["media_info"]["media_type"], "image/png")
        self.assertEqual(channel.json_body["media_info"]["upload_name"], "test.png")
        self.assertEqual(channel.json_body["media_info"]["media_origin"], "remote.com")


class DeleteMediaByIDTestCase(_AdminMediaTests):
    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.server_name = hs.hostname
@@ -710,8 +870,8 @@ class QuarantineMediaByIDTestCase(_AdminMediaTests):
        self.assertFalse(channel.json_body)

        # Test that ALL similar media was quarantined.
        for media in [self.media_id, self.media_id_2, self.media_id_3]:
            media_info = self.get_success(self.store.get_local_media(media))
        for media_item in [self.media_id, self.media_id_2, self.media_id_3]:
            media_info = self.get_success(self.store.get_local_media(media_item))
            assert media_info is not None
            self.assertTrue(media_info.quarantined_by)

@@ -731,8 +891,8 @@ class QuarantineMediaByIDTestCase(_AdminMediaTests):
        self.assertFalse(channel.json_body)

        # Test that ALL similar media is now reset.
        for media in [self.media_id, self.media_id_2, self.media_id_3]:
            media_info = self.get_success(self.store.get_local_media(media))
        for media_item in [self.media_id, self.media_id_2, self.media_id_3]:
            media_info = self.get_success(self.store.get_local_media(media_item))
            assert media_info is not None
            self.assertFalse(media_info.quarantined_by)

@@ -40,127 +40,6 @@ from tests.unittest import override_config
from tests.utils import HAS_AUTHLIB


class KeyUploadTestCase(unittest.HomeserverTestCase):
    servlets = [
        keys.register_servlets,
        admin.register_servlets_for_client_rest_resource,
        login.register_servlets,
    ]

    def test_upload_keys_fails_on_invalid_structure(self) -> None:
        """Check that we validate the structure of keys upon upload.

        Regression test for https://github.com/element-hq/synapse/pull/17097
        """
        self.register_user("alice", "wonderland")
        alice_token = self.login("alice", "wonderland")

        channel = self.make_request(
            "POST",
            "/_matrix/client/v3/keys/upload",
            {
                # Error: device_keys must be a dict
                "device_keys": ["some", "stuff", "weewoo"]
            },
            alice_token,
        )
        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
        self.assertEqual(
            channel.json_body["errcode"],
            Codes.BAD_JSON,
            channel.result,
        )

        channel = self.make_request(
            "POST",
            "/_matrix/client/v3/keys/upload",
            {
                # Error: properties of fallback_keys must be in the form `<algorithm>:<device_id>`
                "fallback_keys": {"invalid_key": "signature_base64"}
            },
            alice_token,
        )
        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
        self.assertEqual(
            channel.json_body["errcode"],
            Codes.BAD_JSON,
            channel.result,
        )

        channel = self.make_request(
            "POST",
            "/_matrix/client/v3/keys/upload",
            {
                # Same as above, but for one_time_keys
                "one_time_keys": {"invalid_key": "signature_base64"}
            },
            alice_token,
        )
        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
        self.assertEqual(
            channel.json_body["errcode"],
            Codes.BAD_JSON,
            channel.result,
        )

    def test_upload_keys_fails_on_invalid_user_id_or_device_id(self) -> None:
        """
        Validate that the requesting user is uploading their own keys and nobody
        else's.
        """
        device_id = "DEVICE_ID"
        alice_user_id = self.register_user("alice", "wonderland")
        alice_token = self.login("alice", "wonderland", device_id=device_id)

        channel = self.make_request(
            "POST",
            "/_matrix/client/v3/keys/upload",
            {
                "device_keys": {
                    # Included `user_id` does not match the requesting user.
                    "user_id": "@unknown_user:test",
                    "device_id": device_id,
                    "algorithms": ["m.olm.curve25519-aes-sha2"],
                    "keys": {
                        f"ed25519:{device_id}": "publickey",
                    },
                    "signatures": {},
                }
            },
            alice_token,
        )
        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
        self.assertEqual(
            channel.json_body["errcode"],
            Codes.BAD_JSON,
            channel.result,
        )

        channel = self.make_request(
            "POST",
            "/_matrix/client/v3/keys/upload",
            {
                "device_keys": {
                    "user_id": alice_user_id,
                    # Included `device_id` does not match the requesting user's.
                    "device_id": "UNKNOWN_DEVICE_ID",
                    "algorithms": ["m.olm.curve25519-aes-sha2"],
                    "keys": {
                        f"ed25519:{device_id}": "publickey",
                    },
                    "signatures": {},
                }
            },
            alice_token,
        )
        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
        self.assertEqual(
            channel.json_body["errcode"],
            Codes.BAD_JSON,
            channel.result,
        )


class KeyQueryTestCase(unittest.HomeserverTestCase):
    servlets = [
        keys.register_servlets,

@@ -249,6 +249,204 @@ class LoggingContextTestCase(unittest.TestCase):
        # Back to the sentinel context
        self._check_test_key("sentinel")

    @logcontext_clean
    async def test_deferred_callback_await_in_current_logcontext(self) -> None:
        """
        Test that calling the deferred callback in the current logcontext ("foo") and
        waiting for it to finish inside the logcontext block works as expected.

        Works because "always await your awaitables".

        Demonstrates one pattern that we can use to fix the naive case where we just
        call `d.callback(None)` without anything else. See the *Deferred callbacks*
        section of docs/log_contexts.md for more details.
        """
        clock = Clock(reactor)

        # Sanity check that we start in the sentinel context
        self._check_test_key("sentinel")

        callback_finished = False

        async def competing_callback() -> None:
            nonlocal callback_finished
            try:
                # The deferred callback should have the same logcontext as the caller
                self._check_test_key("foo")

                with LoggingContext("competing"):
                    await clock.sleep(0)
                    self._check_test_key("competing")

                self._check_test_key("foo")
            finally:
                # When exceptions happen, we still want to mark the callback as finished
                # so that the test can complete and we see the underlying error.
                callback_finished = True

        with LoggingContext("foo"):
            d: defer.Deferred[None] = defer.Deferred()
            d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
            self._check_test_key("foo")
            d.callback(None)
            # The fix for the naive case is here (i.e. things don't work correctly if we
            # don't await here).
            #
            # Wait for `d` to finish before continuing so the "main" logcontext is
            # still active. This works because `d` already follows our logcontext
            # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
            await d
            self._check_test_key("foo")

        await clock.sleep(0)

        self.assertTrue(
            callback_finished,
            "Callback never finished which means the test probably didn't wait long enough",
        )

        # Back to the sentinel context
        self._check_test_key("sentinel")

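For contrast, a sketch of the naive pattern these tests guard against (reusing `competing_callback` from above): with no `await` and no `PreserveLoggingContext`, the "foo" logcontext can leak into the reactor and be finished while the callback is still running.

# Naive sketch (what NOT to do): the callback is fired and forgotten, so when
# the `with` block exits, "foo" is marked finished while competing_callback()
# may still be running with it as the current logcontext.
with LoggingContext("foo"):
    d: defer.Deferred[None] = defer.Deferred()
    d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
    d.callback(None)  # no await, no PreserveLoggingContext -> logcontext leak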
    @logcontext_clean
    async def test_deferred_callback_preserve_logging_context(self) -> None:
        """
        Test that calling the deferred callback inside `PreserveLoggingContext()` (in
        the sentinel context) works as expected.

        Demonstrates one pattern that we can use to fix the naive case where we just
        call `d.callback(None)` without anything else. See the *Deferred callbacks*
        section of docs/log_contexts.md for more details.
        """
        clock = Clock(reactor)

        # Sanity check that we start in the sentinel context
        self._check_test_key("sentinel")

        callback_finished = False

        async def competing_callback() -> None:
            nonlocal callback_finished
            try:
                # The deferred callback should have the same logcontext as the caller
                self._check_test_key("sentinel")

                with LoggingContext("competing"):
                    await clock.sleep(0)
                    self._check_test_key("competing")

                self._check_test_key("sentinel")
            finally:
                # When exceptions happen, we still want to mark the callback as finished
                # so that the test can complete and we see the underlying error.
                callback_finished = True

        with LoggingContext("foo"):
            d: defer.Deferred[None] = defer.Deferred()
            d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
            self._check_test_key("foo")
            # The fix for the naive case is here (i.e. things don't work correctly if we
            # don't `PreserveLoggingContext()` here).
            #
            # `PreserveLoggingContext` will reset the logcontext to the sentinel before
            # calling the callback, and restore the "foo" logcontext afterwards before
            # continuing the foo block. This solves the problem because when the
            # "competing" logcontext exits, it will restore the sentinel logcontext
            # which is never finished by its nature, so there is no warning and no
            # leakage into the reactor.
            with PreserveLoggingContext():
                d.callback(None)
            self._check_test_key("foo")

        await clock.sleep(0)

        self.assertTrue(
            callback_finished,
            "Callback never finished which means the test probably didn't wait long enough",
        )

        # Back to the sentinel context
        self._check_test_key("sentinel")

    @logcontext_clean
    async def test_deferred_callback_fire_and_forget_with_current_context(self) -> None:
        """
        Test that it's possible to call the deferred callback with the current context
        while fire-and-forgetting the callback (no adverse effects like leaking the
        logcontext into the reactor or restarting an already-finished logcontext).

        Demonstrates one pattern that we can use to fix the naive case where we just
        call `d.callback(None)` without anything else. See the *Deferred callbacks*
        section of docs/log_contexts.md for more details.
        """
        clock = Clock(reactor)

        # Sanity check that we start in the sentinel context
        self._check_test_key("sentinel")

        callback_finished = False

        async def competing_callback() -> None:
            nonlocal callback_finished
            try:
                # The deferred callback should have the same logcontext as the caller
                self._check_test_key("foo")

                with LoggingContext("competing"):
                    await clock.sleep(0)
                    self._check_test_key("competing")

                self._check_test_key("foo")
            finally:
                # When exceptions happen, we still want to mark the callback as finished
                # so that the test can complete and we see the underlying error.
                callback_finished = True

        # Part of the fix for the naive case is here (i.e. things don't work correctly
        # if we don't `PreserveLoggingContext(...)` here).
        #
        # We can extend the lifetime of the "foo" logcontext by avoiding the context
        # manager lifetime methods of `LoggingContext` (`__enter__`/`__exit__`).
        # And we can still set the current logcontext by using `PreserveLoggingContext`
        # and passing in the "foo" logcontext.
        with PreserveLoggingContext(LoggingContext("foo")):
            d: defer.Deferred[None] = defer.Deferred()
            d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
            self._check_test_key("foo")
            # The other part of the fix for the naive case is here (i.e. things don't
            # work correctly if we don't `run_in_background(...)` here).
            #
            # `run_in_background(...)` will run the whole lambda in the current
            # logcontext and it handles the magic behind the scenes of a) restoring the
            # calling logcontext before returning to the caller and b) resetting the
            # logcontext to the sentinel after the deferred completes and we yield
            # control back to the reactor, to avoid leaking the logcontext into the
            # reactor.
            #
            # We're using a lambda here as a little trick so we can still get everything
            # to run in the "foo" logcontext, but return the deferred `d` itself so that
            # `run_in_background` will wait on that to complete before resetting the
            # logcontext to the sentinel.
            #
            # type-ignore[call-overload]: This appears to be a mypy type inference bug.
            # A function that returns a deferred is exactly what `run_in_background`
            # expects.
            #
            # type-ignore[func-returns-value]: This appears to be a mypy type inference
            # bug. We're always returning the deferred `d`.
            run_in_background(lambda: (d.callback(None), d)[1])  # type: ignore[call-overload, func-returns-value]
            self._check_test_key("foo")

        await clock.sleep(0)

        self.assertTrue(
            callback_finished,
            "Callback never finished which means the test probably didn't wait long enough",
        )

        # Back to the sentinel context
        self._check_test_key("sentinel")

    async def _test_run_in_background(self, function: Callable[[], object]) -> None:
        clock = Clock(reactor)