Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: dmr/warn-m... → anoa/docs_... (37 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 793a5bfd12 |  |
|  | ba3fd54bad |  |
|  | b2df0716bc |  |
|  | 75dff3dc98 |  |
|  | 01e625513a |  |
|  | 873d467976 |  |
|  | 96e0cdbc5a |  |
|  | 9ce51a47f6 |  |
|  | aa5f5ede33 |  |
|  | d66d68f917 |  |
|  | c4514b97db |  |
|  | 77dee1b451 |  |
|  | 5938928c59 |  |
|  | db2edf5a65 |  |
|  | 13e4386710 |  |
|  | bf2fea8f7d |  |
|  | ae7858f184 |  |
|  | 01dcf7532d |  |
|  | 7e6598bcf6 |  |
|  | 8f5d2823df |  |
|  | 8d156ec0ba |  |
|  | 57fac2a234 |  |
|  | 3ae56d125c |  |
|  | 0d9eaa19fd |  |
|  | 0b684b59e5 |  |
|  | 629aa51743 |  |
|  | 5d3509dfda |  |
|  | 5a320baa45 |  |
|  | f282d5fc11 |  |
|  | ce6ecdd4b4 |  |
|  | 78b99de7c2 |  |
|  | 5ef673de4f |  |
|  | d743b25c8f |  |
|  | 30c8e7e408 |  |
|  | 6463244375 |  |
|  | 8a23bde823 |  |
|  | e8d1ec0e92 |  |
28  .ci/scripts/record_available_doc_versions.py  (new executable file)

@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+# This script will write a json file to $OUTPUT_FILE that contains the name of
+# each available Synapse version with documentation.
+#
+# This script assumes that any top-level directory in the "gh-pages" branch is
+# named after a documentation version and contains documentation website files.
+
+import os.path
+import json
+
+OUTPUT_FILE = "versions.json"
+
+# Determine the list of Synapse versions that have documentation.
+doc_versions = []
+for filepath in os.listdir():
+    if os.path.isdir(filepath):
+        doc_versions.append(filepath)
+
+# Record the documentation versions in a json file, such that the
+# frontend javascript is aware of what versions exist.
+to_write = {
+    "versions": doc_versions,
+    "default_version": "latest",
+}
+
+# Write the file.
+with open(OUTPUT_FILE, "w") as f:
+    f.write(json.dumps(to_write))
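For illustration, here is a minimal, self-contained sketch of the JSON this script produces, run against a throwaway directory tree (the version directory names below are made up; only the file name and keys come from the script above):

```python
import json
import os
import tempfile

with tempfile.TemporaryDirectory() as root:
    for name in ("v1.57", "v1.58", "latest"):  # hypothetical version directories
        os.mkdir(os.path.join(root, name))

    # Same logic as the script: every top-level directory is a doc version.
    doc_versions = [
        entry for entry in os.listdir(root)
        if os.path.isdir(os.path.join(root, entry))
    ]
    to_write = {"versions": sorted(doc_versions), "default_version": "latest"}
    print(json.dumps(to_write))
    # {"versions": ["latest", "v1.57", "v1.58"], "default_version": "latest"}
```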
28  .github/workflows/docs.yaml  (vendored)

@@ -14,7 +14,7 @@ on:

 jobs:
   pages:
-    name: GitHub Pages
+    name: Build and deploy docs
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2

@@ -63,3 +63,29 @@ jobs:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
           destination_dir: ./${{ steps.vars.outputs.branch-version }}
+
+  list_available_versions:
+    needs: pages
+    runs-on: ubuntu-latest
+    steps:
+      # Check out the current branch
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+
+      - name: Save the script
+        run: cp .ci/scripts/record_available_doc_versions.py /
+
+      - uses: actions/setup-python@v3
+
+      # Check out the gh-pages branch, which we'll be pushing the doc versions to
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+          # Check out the gh-pages branch
+          ref: 'gh-pages'
+
+      - name: Record the available documentation versions
+        run: |
+          # Run the script we saved at the root earlier
+          python /record_available_doc_versions.py
3  .github/workflows/latest_deps.yml  (vendored)

@@ -32,12 +32,15 @@ jobs:
         with:
           python-version: "3.x"
           poetry-version: "1.2.0b1"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
       - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy

   trial:
     runs-on: ubuntu-latest
8  .github/workflows/tests.yml  (vendored)

@@ -20,13 +20,9 @@ jobs:
     - run: scripts-dev/config-lint.sh

   lint:
     # This does a vanilla `poetry install` - no extras. I'm slightly anxious
     # that we might skip some typechecks on code that uses extras. However,
     # I think the right way to fix this is to mark any extras needed for
     # typechecking as development dependencies. To detect this, we ought to
     # turn up mypy's strictness: disallow unknown imports and accept fewer
     # uses of `Any`.
     uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
     with:
       typechecking-extras: "all"

   lint-crlf:
     runs-on: ubuntu-latest
2  .github/workflows/twisted_trunk.yml  (vendored)

@@ -24,6 +24,8 @@ jobs:
           poetry remove twisted
           poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
           poetry install --no-interaction --extras "all test"
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy

   trial:
16  CHANGES.md

@@ -1,3 +1,17 @@
+Synapse 1.59.0
+==============
+
+The non-standard `m.login.jwt` login type has been removed from Synapse. It can be replaced with `org.matrix.login.jwt` for identical behaviour. This is only used if `jwt_config.enabled` is set to `true` in the configuration.
+
+
+Synapse 1.58.0 (2022-05-03)
+===========================
+
+As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61.
+
+No significant changes since 1.58.0rc2.
+
+
 Synapse 1.58.0rc2 (2022-04-26)
 ==============================

@@ -19,8 +33,6 @@ Internal Changes
 Synapse 1.58.0rc1 (2022-04-26)
 ==============================

-As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61.
-
 Features
 --------
10  README.rst

@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
 generation of fully open and interoperable messaging and VoIP apps for the
 internet.

-Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
+Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
 team, written in Python 3/Twisted.

 In Matrix, every user runs one or more Matrix clients, which connect through to

@@ -294,13 +294,13 @@ directory of your choice::
   cd synapse

 Synapse has a number of external dependencies. We maintain a fixed development
-environment using [poetry](https://python-poetry.org/). First, install poetry. We recommend
+environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::

   pip install --user pipx
   pipx install poetry

 as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
-(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`
+(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
 for other installation methods.) Then ask poetry to create a virtual environment
 from the project and install Synapse's dependencies::

@@ -309,11 +309,11 @@ from the project and install Synapse's dependencies::
 This will run a process of downloading and installing all the needed
 dependencies into a virtual env.

-We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
+We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::

   poetry run ./demo/start.sh

-(to stop, you can use `poetry run ./demo/stop.sh`)
+(to stop, you can use ``poetry run ./demo/stop.sh``)

 See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
 for more information.
1  changelog.d/12273.bugfix  (new file)
+Fix a bug introduced in Synapse v1.48.0 where the latest thread reply provided failed to include the proper bundled aggregations.

1  changelog.d/12356.misc  (new file)
+Fix scripts-dev to pass typechecking.

1  changelog.d/12406.feature  (new file)
+Add a module API to allow modules to change actions for existing push rules of local users.

1  changelog.d/12480.misc  (new file)
+Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time.

1  changelog.d/12505.misc  (new file)
+Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests.

1  changelog.d/12526.feature  (new file)
+Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid.

1  changelog.d/12531.misc  (new file)
+Remove unused `# type: ignore`s.

1  changelog.d/12556.misc  (new file)
+Release script: confirm the commit to be tagged before tagging.

1  changelog.d/12564.misc  (new file)
+Consistently check if an object is a `frozendict`.

1  changelog.d/12570.bugfix  (new file)
+Fix a bug introduced in Synapse 1.57 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation.

1  changelog.d/12576.misc  (new file)
+Allow unused `#type: ignore` comments in bleeding edge CI jobs.

1  changelog.d/12579.doc  (new file)
+Add missing linebreak to pipx install instructions.

1  changelog.d/12580.bugfix  (new file)
+Fix a long-standing bug where status codes would almost always get logged as `200!`, irrespective of the actual status code, when clients disconnect before a request has finished processing.

1  changelog.d/12581.misc  (new file)
+Improve docstrings for the receipts store.

1  changelog.d/12582.misc  (new file)
+Use constants for read-receipts in tests.

1  changelog.d/12587.misc  (new file)
+Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect.

1  changelog.d/12589.misc  (new file)
+Remove special-case for `twisted` logger from default log config.

1  changelog.d/12594.bugfix  (new file)
+Fix race when persisting an event and deleting a room that could lead to outbound federation breaking.

1  changelog.d/12596.removal  (new file)
+Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069).

2  changelog.d/12597.removal  (new file)
+Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from
+[MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778).

1  changelog.d/12608.misc  (new file)
+Remove redundant lines of config from `mypy.ini`.

1  changelog.d/12612.bugfix  (new file)
+Fix a typo in the announcement text generated by the Synapse release development script.

1  changelog.d/12613.removal  (new file)
+Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk.

1  changelog.d/12614.misc  (new file)
+Add extra debug logging to federation sender.

1  changelog.d/12620.misc  (new file)
+Add a consistency check on events which we read from the database.

1  changelog.d/12624.misc  (new file)
+Remove use of the `constantly` library and switch to enums for `EventRedactBehaviour`. Contributed by @andrewdoh.

1  changelog.d/12627.doc  (new file)
+Fixes to the formatting of README.rst.
6  debian/changelog  (vendored)

@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.58.0) stable; urgency=medium
+
+  * New Synapse release 1.58.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100
+
 matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium

   * New Synapse release 1.58.0rc2.
@@ -20,6 +20,9 @@ RUN rm /etc/nginx/sites-enabled/default
 # Copy Synapse worker, nginx and supervisord configuration template files
 COPY ./docker/conf-workers/* /conf/

+# Copy a script to prefix log lines with the supervisor program name
+COPY ./docker/prefix-log /usr/local/bin/
+
 # Expose nginx listener port
 EXPOSE 8080/tcp

@@ -34,13 +34,16 @@ WORKDIR /data
 # Copy the caddy config
 COPY conf-workers/caddy.complement.json /root/caddy.json

+COPY conf-workers/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
+COPY conf-workers/caddy.supervisord.conf /etc/supervisor/conf.d/caddy.conf
+
 # Copy the entrypoint
 COPY conf-workers/start-complement-synapse-workers.sh /

 # Expose caddy's listener ports
 EXPOSE 8008 8448

-ENTRYPOINT /start-complement-synapse-workers.sh
+ENTRYPOINT ["/start-complement-synapse-workers.sh"]

 # Update the healthcheck to have a shorter check interval
 HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
7  docker/complement/conf-workers/caddy.supervisord.conf  (new file)

@@ -0,0 +1,7 @@
+[program:caddy]
+command=/usr/local/bin/prefix-log /root/caddy run --config /root/caddy.json
+autorestart=unexpected
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
16  docker/complement/conf-workers/postgres.supervisord.conf  (new file)

@@ -0,0 +1,16 @@
+[program:postgres]
+command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground
+
+# Lower priority number = starts first
+priority=1
+
+autorestart=unexpected
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+
+# Use 'Fast Shutdown' mode which aborts current transactions and closes connections quickly.
+# (Default (TERM) is 'Smart Shutdown' which stops accepting new connections but
+# lets existing connections close gracefully.)
+stopsignal=INT
@@ -12,12 +12,6 @@ function log
 # Replace the server name in the caddy config
 sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json

-log "starting postgres"
-pg_ctlcluster 13 main start
-
-log "starting caddy"
-/root/caddy start --config /root/caddy.json
-
 # Set the server name of the homeserver
 export SYNAPSE_SERVER_NAME=${SERVER_NAME}
@@ -2,11 +2,7 @@ version: 1

 formatters:
   precise:
-    {% if worker_name %}
-    format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-    {% else %}
     format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-    {% endif %}

 handlers:
 {% if LOG_FILE_PATH %}
@@ -171,7 +171,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
 # Templates for sections that may be inserted multiple times in config files
 SUPERVISORD_PROCESS_CONFIG_BLOCK = """
 [program:synapse_{name}]
-command=/usr/local/bin/python -m {app} \
+command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
     --config-path="{config_path}" \
     --config-path=/conf/workers/shared.yaml \
     --config-path=/conf/workers/{name}.yaml
12  docker/prefix-log  (new executable file)

@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Prefixes all lines on stdout and stderr with the process name (as determined by
+# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
+#
+# Usage:
+#   prefix-log command [args...]
+#
+
+exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
+exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
+exec "$@"
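The bash above relies on process substitution: each `exec` re-points a file descriptor at an `awk` process that prepends the supervisor-assigned program name before `exec`ing the real command. A rough Python equivalent of the same line-prefixing idea (an illustration only, not part of the repo; the fallback name "demo" is made up):

```python
import os
import subprocess
import sys

# Stand-in for the SUPERVISOR_PROCESS_NAME env var set by Supervisor.
program = os.environ.get("SUPERVISOR_PROCESS_NAME", "demo")

# Run a command and prefix every stdout line with the program name,
# mirroring what prefix-log does with awk and process substitution.
proc = subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE, text=True)
assert proc.stdout is not None
for line in proc.stdout:
    sys.stdout.write(f"{program} | {line}")
proc.wait()
```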
@@ -17,9 +17,6 @@ follows:
   }
 ```

-Note that the login type of `m.login.jwt` is supported, but is deprecated. This
-will be removed in a future version of Synapse.
-
 The `token` field should include the JSON web token with the following claims:

 * A claim that encodes the local part of the user ID is required. By default,
@@ -1323,6 +1323,12 @@ oembed:
 #
 #registration_requires_token: true

+# Allow users to submit a token during registration to bypass any required 3pid
+# steps configured in `registrations_require_3pid`.
+# Defaults to false, requiring that registration tokens (if enabled) complete a 3pid flow.
+#
+#enable_registration_token_3pid_bypass: false
+
 # If set, allows registration of standard or admin accounts by anyone who
 # has the shared secret, even if registration is otherwise disabled.
 #
@@ -62,13 +62,6 @@ loggers:
         # information such as access tokens.
         level: INFO

-    twisted:
-        # We send the twisted logging directly to the file handler,
-        # to work around https://github.com/matrix-org/synapse/issues/3471
-        # when using "buffer" logger. Use "console" to log to stderr instead.
-        handlers: [file]
-        propagate: false
-
 root:
     level: INFO
@@ -28,7 +28,7 @@ See the following for how to decode the dense data available from the default logging configuration.
 | NNNN | Total time waiting for response to DB queries across all parallel DB work from this request |
 | OOOO | Count of DB transactions performed |
 | PPPP | Response body size |
-| QQQQ | Response status code (prefixed with ! if the socket was closed before the response was generated) |
+| QQQQ | Response status code<br/>Suffixed with `!` if the socket was closed before the response was generated.<br/>A `499!` status code indicates that Synapse also cancelled request processing after the socket was closed.<br/> |
 | RRRR | Request |
 | SSSS | User-agent |
 | TTTT | Events fetched from DB to service this request (note that this does not include events fetched from the cache) |
62  mypy.ini

@@ -7,6 +7,7 @@ show_error_codes = True
 show_traceback = True
 mypy_path = stubs
 warn_unreachable = True
+warn_unused_ignores = True
 local_partial_types = True
 no_implicit_optional = True

@@ -23,10 +24,6 @@ files =
 # https://docs.python.org/3/library/re.html#re.X
 exclude = (?x)
   ^(
-   |scripts-dev/build_debian_packages.py
-   |scripts-dev/federation_client.py
-   |scripts-dev/release.py
   |synapse/storage/databases/__init__.py
   |synapse/storage/databases/main/cache.py
   |synapse/storage/databases/main/devices.py

@@ -134,6 +131,11 @@ disallow_untyped_defs = True
 [mypy-synapse.metrics.*]
 disallow_untyped_defs = True

+[mypy-synapse.metrics._reactor_metrics]
+# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
+# See https://github.com/matrix-org/synapse/pull/11771.
+warn_unused_ignores = False
+
 [mypy-synapse.module_api.*]
 disallow_untyped_defs = True

@@ -239,63 +241,26 @@ disallow_untyped_defs = True
 [mypy-authlib.*]
 ignore_missing_imports = True

 [mypy-bcrypt]
 ignore_missing_imports = True

 [mypy-canonicaljson]
 ignore_missing_imports = True

 [mypy-constantly]
 ignore_missing_imports = True

 [mypy-daemonize]
 ignore_missing_imports = True

 [mypy-h11]
 ignore_missing_imports = True

 [mypy-hiredis]
 ignore_missing_imports = True

 [mypy-hyperlink]
 ignore_missing_imports = True

 [mypy-ijson.*]
 ignore_missing_imports = True

 [mypy-importlib_metadata.*]
 ignore_missing_imports = True

 [mypy-jaeger_client.*]
 ignore_missing_imports = True

 [mypy-josepy.*]
 ignore_missing_imports = True

 [mypy-jwt.*]
 ignore_missing_imports = True

 [mypy-lxml]
 ignore_missing_imports = True

 [mypy-msgpack]
 ignore_missing_imports = True

 [mypy-nacl.*]
 ignore_missing_imports = True

 # Note: WIP stubs available at
 # https://github.com/microsoft/python-type-stubs/tree/64934207f523ad6b611e6cfe039d85d7175d7d0d/netaddr
 [mypy-netaddr]
 ignore_missing_imports = True

 [mypy-parameterized.*]
 ignore_missing_imports = True

 [mypy-phonenumbers.*]
 ignore_missing_imports = True

 [mypy-prometheus_client.*]
 ignore_missing_imports = True

 [mypy-pymacaroons.*]
 ignore_missing_imports = True

@@ -308,23 +273,14 @@ ignore_missing_imports = True
 [mypy-saml2.*]
 ignore_missing_imports = True

 [mypy-sentry_sdk]
 ignore_missing_imports = True

 [mypy-service_identity.*]
 ignore_missing_imports = True

 [mypy-signedjson.*]
 [mypy-srvlookup.*]
 ignore_missing_imports = True

 [mypy-treq.*]
 ignore_missing_imports = True

 [mypy-twisted.*]
 ignore_missing_imports = True

 [mypy-zope]
 ignore_missing_imports = True

 [mypy-incremental.*]
 ignore_missing_imports = True
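The `_reactor_metrics` override above exists because a `# type: ignore` can be "used" on one platform and "unused" on another, so `warn_unused_ignores` must be relaxed for that one module. A minimal illustration of the underlying platform difference (a sketch, not the module's actual code):

```python
import select

# On Linux, select.epoll exists; on macOS it does not. A `# type: ignore`
# placed on a bare `select.epoll` reference is therefore needed when
# checking with macOS-flavoured stubs but flagged as unused on Linux --
# hence warn_unused_ignores = False for this single module.
if hasattr(select, "epoll"):
    poller = select.epoll()  # Linux-only API
    poller.close()
else:
    print("select.epoll is unavailable on this platform")
```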
27  poetry.lock  (generated)

@@ -309,14 +309,15 @@ smmap = ">=3.0.1,<6"

 [[package]]
 name = "gitpython"
-version = "3.1.14"
-description = "Python Git Library"
+version = "3.1.27"
+description = "GitPython is a python library used to interact with Git repositories"
 category = "dev"
 optional = false
-python-versions = ">=3.4"
+python-versions = ">=3.7"

 [package.dependencies]
 gitdb = ">=4.0.1,<5"
+typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}

 [[package]]
 name = "hiredis"

@@ -1315,6 +1316,14 @@ category = "dev"
 optional = false
 python-versions = "*"

+[[package]]
+name = "types-commonmark"
+version = "0.9.2"
+description = "Typing stubs for commonmark"
+category = "dev"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "types-cryptography"
 version = "3.3.15"

@@ -1552,8 +1561,8 @@ url_preview = ["lxml"]

 [metadata]
 lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "f482a4f594a165dfe01ce253a22510d5faf38647ab0dcebc35789350cafd9bf0"
+python-versions = "^3.7.1"
+content-hash = "2bda1a7cfc8cc02832b4a7d16bf7e1615cb05e0639bdb30688aadf692d851942"

 [metadata.files]
 attrs = [

@@ -1766,8 +1775,8 @@ gitdb = [
     {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
 ]
 gitpython = [
-    {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"},
-    {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"},
+    {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"},
+    {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
 ]
 hiredis = [
     {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},

@@ -2588,6 +2597,10 @@ types-bleach = [
     {file = "types-bleach-4.1.4.tar.gz", hash = "sha256:2d30c2c4fb6854088ac636471352c9a51bf6c089289800d2a8060820a01cd43a"},
     {file = "types_bleach-4.1.4-py3-none-any.whl", hash = "sha256:edffe173ed6d7b6f3543036a96204a9319c3bf6c3645917b14274e43f000cc9b"},
 ]
+types-commonmark = [
+    {file = "types-commonmark-0.9.2.tar.gz", hash = "sha256:b894b67750c52fd5abc9a40a9ceb9da4652a391d75c1b480bba9cef90f19fc86"},
+    {file = "types_commonmark-0.9.2-py3-none-any.whl", hash = "sha256:56f20199a1f9a2924443211a0ef97f8b15a8a956a7f4e9186be6950bf38d6d02"},
+]
 types-cryptography = [
     {file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"},
     {file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"},
@@ -54,7 +54,7 @@ skip_gitignore = true

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.58.0rc2"
+version = "1.58.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"

@@ -100,7 +100,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
 update_synapse_database = "synapse._scripts.update_synapse_database:main"

 [tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.7.1"

 # Mandatory Dependencies
 # ----------------------

@@ -251,6 +251,7 @@ flake8 = "*"
 mypy = "==0.931"
 mypy-zope = "==0.3.5"
 types-bleach = ">=4.1.0"
+types-commonmark = ">=0.9.2"
 types-jsonschema = ">=3.2.0"
 types-opentracing = ">=2.4.2"
 types-Pillow = ">=8.3.4"

@@ -270,7 +271,8 @@ idna = ">=2.5"

 # The following are used by the release script
 click = "==8.1.0"
-GitPython = "==3.1.14"
+# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
+GitPython = ">=3.1.20"
 commonmark = "==0.9.1"
 pygithub = "==1.55"
 # The following are executed as commands by the release script.
@@ -17,7 +17,8 @@ import subprocess
 import sys
 import threading
 from concurrent.futures import ThreadPoolExecutor
-from typing import Optional, Sequence
+from types import FrameType
+from typing import Collection, Optional, Sequence, Set

 DISTS = (
     "debian:buster",  # oldstable: EOL 2022-08

@@ -41,15 +42,17 @@ projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

 class Builder(object):
     def __init__(
-        self, redirect_stdout=False, docker_build_args: Optional[Sequence[str]] = None
+        self,
+        redirect_stdout: bool = False,
+        docker_build_args: Optional[Sequence[str]] = None,
     ):
         self.redirect_stdout = redirect_stdout
         self._docker_build_args = tuple(docker_build_args or ())
-        self.active_containers = set()
+        self.active_containers: Set[str] = set()
         self._lock = threading.Lock()
         self._failed = False

-    def run_build(self, dist, skip_tests=False):
+    def run_build(self, dist: str, skip_tests: bool = False) -> None:
         """Build deb for a single distribution"""

         if self._failed:

@@ -63,7 +66,7 @@ class Builder(object):
             self._failed = True
             raise

-    def _inner_build(self, dist, skip_tests=False):
+    def _inner_build(self, dist: str, skip_tests: bool = False) -> None:
         tag = dist.split(":", 1)[1]

         # Make the dir where the debs will live.

@@ -138,7 +141,7 @@ class Builder(object):
             stdout.close()
             print("Completed build of %s" % (dist,))

-    def kill_containers(self):
+    def kill_containers(self) -> None:
         with self._lock:
             active = list(self.active_containers)

@@ -156,8 +159,10 @@ class Builder(object):
                 self.active_containers.remove(c)


-def run_builds(builder, dists, jobs=1, skip_tests=False):
-    def sig(signum, _frame):
+def run_builds(
+    builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
+) -> None:
+    def sig(signum: int, _frame: Optional[FrameType]) -> None:
         print("Caught SIGINT")
         builder.kill_containers()
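The new `sig` annotations match how Python's `signal` module actually invokes handlers: with the signal number and the current stack frame (which may be `None`). A self-contained sketch of the same typing pattern:

```python
import signal
import time
from types import FrameType
from typing import Optional


def sig(signum: int, _frame: Optional[FrameType]) -> None:
    # Signal handlers receive (signal number, current frame or None),
    # which is exactly the signature the diff adds.
    print(f"Caught signal {signum}")


signal.signal(signal.SIGINT, sig)
print("Press Ctrl-C within 2 seconds to trigger the handler...")
time.sleep(2)
```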
@@ -38,7 +38,7 @@ import argparse
 import base64
 import json
 import sys
-from typing import Any, Optional
+from typing import Any, Dict, Optional, Tuple
 from urllib import parse as urlparse

 import requests

@@ -47,13 +47,14 @@ import signedjson.types
 import srvlookup
 import yaml
 from requests.adapters import HTTPAdapter
+from urllib3 import HTTPConnectionPool

 # uncomment the following to enable debug logging of http requests
 # from httplib import HTTPConnection
 # HTTPConnection.debuglevel = 1


-def encode_base64(input_bytes):
+def encode_base64(input_bytes: bytes) -> str:
     """Encode bytes as a base64 string without any padding."""

     input_len = len(input_bytes)

@@ -63,7 +64,7 @@ def encode_base64(input_bytes):
     return output_string


-def encode_canonical_json(value):
+def encode_canonical_json(value: object) -> bytes:
     return json.dumps(
         value,
         # Encode code-points outside of ASCII as UTF-8 rather than \u escapes

@@ -130,7 +131,7 @@ def request(
         sig,
         destination,
     )
-    authorization_headers.append(header.encode("ascii"))
+    authorization_headers.append(header)
     print("Authorization: %s" % header, file=sys.stderr)

     dest = "matrix://%s%s" % (destination, path)

@@ -139,7 +140,10 @@ def request(
     s = requests.Session()
     s.mount("matrix://", MatrixConnectionAdapter())

-    headers = {"Host": destination, "Authorization": authorization_headers[0]}
+    headers: Dict[str, str] = {
+        "Host": destination,
+        "Authorization": authorization_headers[0],
+    }

     if method == "POST":
         headers["Content-Type"] = "application/json"

@@ -154,7 +158,7 @@ def request(
     )


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="Signs and sends a federation request to a matrix homeserver"
     )

@@ -212,6 +216,7 @@ def main():
     if not args.server_name or not args.signing_key:
         read_args_from_config(args)

+    assert isinstance(args.signing_key, str)
     algorithm, version, key_base64 = args.signing_key.split()
     key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)

@@ -233,7 +238,7 @@ def main():
     print("")


-def read_args_from_config(args):
+def read_args_from_config(args: argparse.Namespace) -> None:
     with open(args.config, "r") as fh:
         config = yaml.safe_load(fh)

@@ -250,7 +255,7 @@ def read_args_from_config(args):

 class MatrixConnectionAdapter(HTTPAdapter):
     @staticmethod
-    def lookup(s, skip_well_known=False):
+    def lookup(s: str, skip_well_known: bool = False) -> Tuple[str, int]:
         if s[-1] == "]":
             # ipv6 literal (with no port)
             return s, 8448

@@ -276,7 +281,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         return s, 8448

     @staticmethod
-    def get_well_known(server_name):
+    def get_well_known(server_name: str) -> Optional[str]:
         uri = "https://%s/.well-known/matrix/server" % (server_name,)
         print("fetching %s" % (uri,), file=sys.stderr)

@@ -299,7 +304,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
             print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
             return None

-    def get_connection(self, url, proxies=None):
+    def get_connection(
+        self, url: str, proxies: Optional[Dict[str, str]] = None
+    ) -> HTTPConnectionPool:
         parsed = urlparse.urlparse(url)

         (host, port) = self.lookup(parsed.netloc)
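For context: `federation_client.py` works by mounting a custom transport adapter for its `matrix://` URL scheme on a `requests.Session`. A standalone sketch of that general requests pattern (the `LoggingAdapter` class is a made-up toy, not the script's adapter):

```python
import requests
from requests.adapters import HTTPAdapter


class LoggingAdapter(HTTPAdapter):
    """Toy adapter: behaves like HTTPAdapter but logs each request URL."""

    def send(self, request, *args, **kwargs):
        print("sending", request.url)
        return super().send(request, *args, **kwargs)


session = requests.Session()
# Session.mount() routes any URL matching this prefix through the adapter --
# the same mechanism federation_client.py uses for "matrix://".
session.mount("https://", LoggingAdapter())
response = session.get("https://example.org/")
print(response.status_code)
```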
@@ -16,7 +16,7 @@
 can crop up, e.g the cache descriptors.
 """

-from typing import Callable, Optional
+from typing import Callable, Optional, Type

 from mypy.nodes import ARG_NAMED_OPT
 from mypy.plugin import MethodSigContext, Plugin

@@ -94,7 +94,7 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
     return signature


-def plugin(version: str):
+def plugin(version: str) -> Type[SynapsePlugin]:
     # This is the entry point of the plugin, and lets us deal with the fact
     # that the mypy plugin interface is *not* stable by looking at the version
     # string.
@@ -25,7 +25,7 @@ import sys
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
-from typing import List, Optional
+from typing import Any, List, Optional, cast

 import attr
 import click

@@ -36,7 +36,9 @@ from github import Github
 from packaging import version


-def run_until_successful(command, *args, **kwargs):
+def run_until_successful(
+    command: str, *args: Any, **kwargs: Any
+) -> subprocess.CompletedProcess:
     while True:
         completed_process = subprocess.run(command, *args, **kwargs)
         exit_code = completed_process.returncode

@@ -50,7 +52,7 @@ def run_until_successful(command, *args, **kwargs):


 @click.group()
-def cli():
+def cli() -> None:
     """An interactive script to walk through the parts of creating a release.

     Requires the dev dependencies be installed, which can be done via:

@@ -81,19 +83,13 @@ def cli():


 @cli.command()
-def prepare():
+def prepare() -> None:
     """Do the initial stages of creating a release, including creating release
     branch, updating changelog and pushing to GitHub.
     """

-    # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    repo = get_repo_and_check_clean_checkout()

     click.secho("Updating git repo...")
     repo.remote().fetch()

@@ -161,22 +157,21 @@ def prepare():
         click.get_current_context().abort()

     # Switch to the release branch.
-    parsed_new_version: version.Version = version.parse(new_version)
+    # Cast safety: parse() won't return a version.LegacyVersion from our
+    # version string format.
+    parsed_new_version = cast(version.Version, version.parse(new_version))

     # We assume for debian changelogs that we only do RCs or full releases.
     assert not parsed_new_version.is_devrelease
     assert not parsed_new_version.is_postrelease

-    release_branch_name = (
-        f"release-v{parsed_new_version.major}.{parsed_new_version.minor}"
-    )
+    release_branch_name = get_release_branch_name(parsed_new_version)
     release_branch = find_ref(repo, release_branch_name)
     if release_branch:
         if release_branch.is_remote():
             # If the release branch only exists on the remote we check it out
             # locally.
             repo.git.checkout(release_branch_name)
             release_branch = repo.active_branch
     else:
         # If a branch doesn't exist we create one. We ask which one branch it
         # should be based off, defaulting to sensible values depending on the

@@ -198,13 +193,15 @@ def prepare():
         click.get_current_context().abort()

     # Check out the base branch and ensure it's up to date
-    repo.head.reference = base_branch
+    repo.head.set_reference(base_branch, "check out the base branch")
     repo.head.reset(index=True, working_tree=True)
     if not base_branch.is_remote():
         update_branch(repo)

     # Create the new release branch
-    release_branch = repo.create_head(release_branch_name, commit=base_branch)
+    # Type ignore will no longer be needed after GitPython 3.1.28.
+    # See https://github.com/gitpython-developers/GitPython/pull/1419
+    repo.create_head(release_branch_name, commit=base_branch)  # type: ignore[arg-type]

     # Switch to the release branch and ensure it's up to date.
     repo.git.checkout(release_branch_name)

@@ -265,17 +262,11 @@ def prepare():

 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
-def tag(gh_token: Optional[str]):
+def tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""

-    # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    repo = get_repo_and_check_clean_checkout()

     click.secho("Updating git repo...")
     repo.remote().fetch()

@@ -288,12 +279,26 @@ def tag(gh_token: Optional[str]):
     if tag_name in repo.tags:
         raise click.ClickException(f"Tag {tag_name} already exists!\n")

+    # Check we're on the right release branch
+    release_branch = get_release_branch_name(current_version)
+    if repo.active_branch.name != release_branch:
+        click.echo(
+            f"Need to be on the release branch ({release_branch}) before tagging. "
+            f"Currently on ({repo.active_branch.name})."
+        )
+        click.get_current_context().abort()
+
     # Get the appropriate changelogs and tag.
     changes = get_changes_for_version(current_version)

     click.echo_via_pager(changes)
     if click.confirm("Edit text?", default=False):
-        changes = click.edit(changes, require_save=False)
+        edited_changes = click.edit(changes, require_save=False)
+        # This assert is for mypy's benefit. click's docs are a little unclear, but
+        # when `require_save=False`, not saving the temp file in the editor returns
+        # the original string.
+        assert edited_changes is not None
+        changes = edited_changes

     repo.create_tag(tag_name, message=changes, sign=True)

@@ -347,22 +352,16 @@ def tag(gh_token: Optional[str]):

 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
-def publish(gh_token: str):
-    """Publish release."""
+def publish(gh_token: str) -> None:
+    """Publish release on GitHub."""

-    # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    get_repo_and_check_clean_checkout()

     current_version = get_package_version()
     tag_name = f"v{current_version}"

-    if not click.confirm(f"Publish {tag_name}?", default=True):
+    if not click.confirm(f"Publish release {tag_name} on GitHub?", default=True):
         return

     # Publish the draft release

@@ -390,12 +389,19 @@ def publish(gh_token: str):


 @cli.command()
-def upload():
+def upload() -> None:
     """Upload release to pypi."""

     current_version = get_package_version()
     tag_name = f"v{current_version}"

+    # Check we have the right tag checked out.
+    repo = get_repo_and_check_clean_checkout()
+    tag = repo.tag(f"refs/tags/{tag_name}")
+    if repo.head.commit != tag.commit:
+        click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
+        click.get_current_context().abort()
+
     pypi_asset_names = [
         f"matrix_synapse-{current_version}-py3-none-any.whl",
         f"matrix-synapse-{current_version}.tar.gz",

@@ -418,7 +424,7 @@ def upload():


 @cli.command()
-def announce():
+def announce() -> None:
     """Generate markdown to announce the release."""

     current_version = get_package_version()

@@ -428,7 +434,7 @@ def announce():
         f"""
 Hi everyone. Synapse {current_version} has just been released.

-[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) |\
+[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) | \
 [docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
 [debs](https://packages.matrix.org/debian/) | \
 [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""

@@ -459,20 +465,36 @@ def get_package_version() -> version.Version:
     return version.Version(version_string)


+def get_release_branch_name(version_number: version.Version) -> str:
+    return f"release-v{version_number.major}.{version_number.minor}"
+
+
+def get_repo_and_check_clean_checkout() -> git.Repo:
+    """Get the project repo and check it's not got any uncommitted changes."""
+    try:
+        repo = git.Repo()
+    except git.InvalidGitRepositoryError:
+        raise click.ClickException("Not in Synapse repo.")
+    if repo.is_dirty():
+        raise click.ClickException("Uncommitted changes exist.")
+    return repo
+
+
 def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
     """Find the branch/ref, looking first locally then in the remote."""
-    if ref_name in repo.refs:
-        return repo.refs[ref_name]
+    if ref_name in repo.references:
+        return repo.references[ref_name]
     elif ref_name in repo.remote().refs:
         return repo.remote().refs[ref_name]
     else:
         return None


-def update_branch(repo: git.Repo):
+def update_branch(repo: git.Repo) -> None:
     """Ensure branch is up to date if it has a remote"""
-    if repo.active_branch.tracking_branch():
-        repo.git.merge(repo.active_branch.tracking_branch().name)
+    tracking_branch = repo.active_branch.tracking_branch()
+    if tracking_branch:
+        repo.git.merge(tracking_branch.name)


 def get_changes_for_version(wanted_version: version.Version) -> str:

@@ -536,7 +558,9 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
     return "\n".join(version_changelog)


-def generate_and_write_changelog(current_version: version.Version, new_version: str):
+def generate_and_write_changelog(
+    current_version: version.Version, new_version: str
+) -> None:
     # We do this by getting a draft so that we can edit it before writing to the
     # changelog.
     result = run_until_successful(

@@ -558,8 +582,8 @@ def generate_and_write_changelog(current_version: version.Version, new_version:
         f.write(existing_content)

     # Remove all the news fragments
-    for f in glob.iglob("changelog.d/*.*"):
-        os.remove(f)
+    for filename in glob.iglob("changelog.d/*.*"):
+        os.remove(filename)


 if __name__ == "__main__":
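The release script's `run_until_successful` helper, whose body is only partly visible in the hunk above, retries a shell command until it exits zero. A minimal standalone version of that idea (the retry message is an assumption; only the signature and the `subprocess.run` loop are taken from the diff):

```python
import subprocess
from typing import Any


def run_until_successful(
    command: str, *args: Any, **kwargs: Any
) -> subprocess.CompletedProcess:
    # Re-run the command until it reports success (exit code 0).
    while True:
        completed_process = subprocess.run(command, *args, **kwargs)
        if completed_process.returncode == 0:
            return completed_process
        print(f"The command {command!r} failed. Retrying...")


# Usage: shell=True matches how the release script invokes external tools.
run_until_successful("true", shell=True)
```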
@@ -27,7 +27,7 @@ from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.util import json_encoder


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Adds a signature to a JSON object.
@@ -115,9 +115,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     def __getitem__(self, index: slice) -> List[_KT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...

-class SortedItemsView(  # type: ignore
-    ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]
-):
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
     def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
     @overload
     def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool

-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home

@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats

@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)

     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined]  # noqa
+    init_tracer(hs)  # noqa

     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.
@@ -110,13 +110,6 @@ loggers:
         # information such as access tokens.
         level: INFO

-    twisted:
-        # We send the twisted logging directly to the file handler,
-        # to work around https://github.com/matrix-org/synapse/issues/3471
-        # when using "buffer" logger. Use "console" to log to stderr instead.
-        handlers: [file]
-        propagate: false
-
 root:
     level: INFO
@@ -43,6 +43,9 @@ class RegistrationConfig(Config):
         self.registration_requires_token = config.get(
             "registration_requires_token", False
         )
+        self.enable_registration_token_3pid_bypass = config.get(
+            "enable_registration_token_3pid_bypass", False
+        )
         self.registration_shared_secret = config.get("registration_shared_secret")

         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)

@@ -309,6 +312,12 @@ class RegistrationConfig(Config):
         #
         #registration_requires_token: true

+        # Allow users to submit a token during registration to bypass any required 3pid
+        # steps configured in `registrations_require_3pid`.
+        # Defaults to false, requiring that registration tokens (if enabled) complete a 3pid flow.
+        #
+        #enable_registration_token_3pid_bypass: false
+
         # If set, allows registration of standard or admin accounts by anyone who
         # has the shared secret, even if registration is otherwise disabled.
         #
@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,

@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type]  # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(
@@ -213,10 +213,17 @@ class _EventInternalMetadata:
         return self.outlier

     def is_out_of_band_membership(self) -> bool:
-        """Whether this is an out of band membership, like an invite or an invite
-        rejection. This is needed as those events are marked as outliers, but
-        they still need to be processed as if they're new events (e.g. updating
-        invite state in the database, relaying to clients, etc).
+        """Whether this event is an out-of-band membership.
+
+        OOB memberships are a special case of outlier events: they are membership events
+        for federated rooms that we aren't full members of. Examples include invites
+        received over federation, and rejections for such invites.
+
+        The concept of an OOB membership is needed because these events need to be
+        processed as if they're new regular events (e.g. updating membership state in
+        the database, relaying to clients via /sync, etc) despite being outliers.
+
+        See also https://matrix-org.github.io/synapse/develop/development/room-dag-concepts.html#out-of-band-membership-events.
+
+        (Added in synapse 0.99.0, so may be unreliable for events received before that)
         """
@@ -27,7 +27,6 @@ from typing import (
|
||||
)
|
||||
|
||||
import attr
|
||||
from frozendict import frozendict
|
||||
|
||||
from synapse.api.constants import EventContentFields, EventTypes, RelationTypes
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
@@ -204,7 +203,9 @@ def _copy_field(src: JsonDict, dst: JsonDict, field: List[str]) -> None:
|
||||
key_to_move = field.pop(-1)
|
||||
sub_dict = src
|
||||
for sub_field in field: # e.g. sub_field => "content"
|
||||
if sub_field in sub_dict and type(sub_dict[sub_field]) in [dict, frozendict]:
|
||||
if sub_field in sub_dict and isinstance(
|
||||
sub_dict[sub_field], collections.abc.Mapping
|
||||
):
|
||||
sub_dict = sub_dict[sub_field]
|
||||
else:
|
||||
return
|
||||
@@ -425,13 +426,12 @@ class EventClientSerializer:
|
||||
|
||||
# Check if there are any bundled aggregations to include with the event.
|
||||
if bundle_aggregations:
|
||||
event_aggregations = bundle_aggregations.get(event.event_id)
|
||||
if event_aggregations:
|
||||
if event.event_id in bundle_aggregations:
|
||||
self._inject_bundled_aggregations(
|
||||
event,
|
||||
time_now,
|
||||
config,
|
||||
event_aggregations,
|
||||
bundle_aggregations,
|
||||
serialized_event,
|
||||
apply_edits=apply_edits,
|
||||
)
|
||||
@@ -470,7 +470,7 @@ class EventClientSerializer:
|
||||
event: EventBase,
|
||||
time_now: int,
|
||||
config: SerializeEventConfig,
|
||||
aggregations: "BundledAggregations",
|
||||
bundled_aggregations: Dict[str, "BundledAggregations"],
|
||||
serialized_event: JsonDict,
|
||||
apply_edits: bool,
|
||||
) -> None:
|
||||
@@ -480,22 +480,37 @@ class EventClientSerializer:
|
||||
event: The event being serialized.
|
||||
time_now: The current time in milliseconds
|
||||
config: Event serialization config
|
||||
aggregations: The bundled aggregation to serialize.
|
||||
bundled_aggregations: Bundled aggregations to be injected.
|
||||
A map from event_id to aggregation data. Must contain at least an
|
||||
entry for `event`.
|
||||
|
||||
While serializing the bundled aggregations this map may be searched
|
||||
again for additional events in a recursive manner.
|
||||
serialized_event: The serialized event which may be modified.
|
||||
apply_edits: Whether the content of the event should be modified to reflect
|
||||
any replacement in `aggregations.replace`.
|
||||
"""
|
||||
|
||||
# We have already checked that aggregations exist for this event.
|
||||
event_aggregations = bundled_aggregations[event.event_id]
|
||||
|
||||
# The JSON dictionary to be added under the unsigned property of the event
|
||||
# being serialized.
|
||||
serialized_aggregations = {}
|
||||
|
||||
if aggregations.annotations:
|
||||
serialized_aggregations[RelationTypes.ANNOTATION] = aggregations.annotations
|
||||
if event_aggregations.annotations:
|
||||
serialized_aggregations[
|
||||
RelationTypes.ANNOTATION
|
||||
] = event_aggregations.annotations
|
||||
|
||||
if aggregations.references:
|
||||
serialized_aggregations[RelationTypes.REFERENCE] = aggregations.references
|
||||
if event_aggregations.references:
|
||||
serialized_aggregations[
|
||||
RelationTypes.REFERENCE
|
||||
] = event_aggregations.references
|
||||
|
||||
if aggregations.replace:
|
||||
if event_aggregations.replace:
|
||||
# If there is an edit, optionally apply it to the event.
|
||||
edit = aggregations.replace
|
||||
edit = event_aggregations.replace
|
||||
if apply_edits:
|
||||
self._apply_edit(event, serialized_event, edit)
|
||||
|
||||
@@ -506,19 +521,16 @@ class EventClientSerializer:
                 "sender": edit.sender,
             }

-        # If this event is the start of a thread, include a summary of the replies.
-        if aggregations.thread:
-            thread = aggregations.thread
+        # Include any threaded replies to this event.
+        if event_aggregations.thread:
+            thread = event_aggregations.thread

-            # Don't bundle aggregations as this could recurse forever.
-            serialized_latest_event = serialize_event(
-                thread.latest_event, time_now, config=config
+            serialized_latest_event = self.serialize_event(
+                thread.latest_event,
+                time_now,
+                config=config,
+                bundle_aggregations=bundled_aggregations,
             )
-            # Manually apply an edit, if one exists.
-            if thread.latest_edit:
-                self._apply_edit(
-                    thread.latest_event, serialized_latest_event, thread.latest_edit
-                )

             thread_summary = {
                 "latest_event": serialized_latest_event,
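The `serialized_aggregations` dict built above is attached to the serialized event under `unsigned` → `m.relations`, keyed by relation type. A minimal sketch of the resulting shape, with illustrative values (field layout as in MSC2675; not copied from the PR):

serialized_event = {
    "event_id": "$root",
    "type": "m.room.message",
    "content": {"body": "original", "msgtype": "m.text"},
    "unsigned": {
        "m.relations": {
            "m.annotation": {
                "chunk": [{"type": "m.reaction", "key": "👍", "count": 2}]
            },
            "m.thread": {
                # With this change the latest thread event is itself serialized
                # with bundled aggregations, recursively.
                "latest_event": {"event_id": "$reply", "type": "m.room.message"},
                "count": 3,
            },
        }
    },
}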
@@ -622,7 +634,7 @@ def validate_canonicaljson(value: Any) -> None:
         # Note that Infinity, -Infinity, and NaN are also considered floats.
         raise SynapseError(400, "Bad JSON value: float", Codes.BAD_JSON)

-    elif isinstance(value, (dict, frozendict)):
+    elif isinstance(value, collections.abc.Mapping):
         for v in value.values():
             validate_canonicaljson(v)

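Checking against `collections.abc.Mapping` covers `dict`, `frozendict`, and any other mapping type, which is what lets the explicit `frozendict` check be dropped. A minimal standalone sketch of the difference (not Synapse code; assumes the `frozendict` package is installed):

import collections.abc

from frozendict import frozendict

d = {"rel_type": "m.thread"}
fd = frozendict(d)

# The old check had to enumerate every mapping type explicitly.
print(isinstance(fd, (dict, frozendict)))        # True, but brittle
# The new check accepts any mapping, including future immutable-dict types.
print(isinstance(fd, collections.abc.Mapping))   # True
print(isinstance(d, collections.abc.Mapping))    # True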
@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )

@@ -343,9 +343,16 @@ class FederationSender(AbstractFederationSender):
                 last_token, self._last_poked_id, limit=100
             )

-            logger.debug("Handling %s -> %s", last_token, next_token)
+            logger.debug(
+                "Handling %i -> %i: %i events to send (current id %i)",
+                last_token,
+                next_token,
+                len(events),
+                self._last_poked_id,
+            )

             if not events and next_token >= self._last_poked_id:
                 logger.debug("All events processed")
                 break

             async def handle_event(event: EventBase) -> None:
@@ -353,9 +360,53 @@ class FederationSender(AbstractFederationSender):
                 send_on_behalf_of = event.internal_metadata.get_send_on_behalf_of()
                 is_mine = self.is_mine_id(event.sender)
                 if not is_mine and send_on_behalf_of is None:
                     logger.debug("Not sending remote-origin event %s", event)
                     return

+                # We also want to not send out-of-band membership events.
+                #
+                # OOB memberships are used in three (and a half) situations:
+                #
+                # (1) invite events which we have received over federation. Those
+                #     will have a `sender` on a different server, so will be
+                #     skipped by the "is_mine" test above anyway.
+                #
+                # (2) rejections of invites to federated rooms - either remotely
+                #     or locally generated. (Such rejections are normally
+                #     created via federation, in which case the remote server is
+                #     responsible for sending out the rejection. If that fails,
+                #     we'll create a leave event locally, but that's only really
+                #     for the benefit of the invited user - we don't have enough
+                #     information to send it out over federation).
+                #
+                # (2a) rescinded knocks. These are identical to rejected invites.
+                #
+                # (3) knock events which we have sent over federation. As with
+                #     invite rejections, the remote server should send them out to
+                #     the federation.
+                #
+                # So, in all the above cases, we want to ignore such events.
+                #
+                # OOB memberships are always(?) outliers anyway, so if we *don't*
+                # ignore them, we'll get an exception further down when we try to
+                # fetch the membership list for the room.
+                #
+                # Arguably, we could equivalently ignore all outliers here, since
+                # in theory the only way for an outlier with a local `sender` to
+                # exist is by being an OOB membership (via one of (2), (2a) or (3)
+                # above).
+                #
+                if event.internal_metadata.is_out_of_band_membership():
+                    logger.debug("Not sending OOB membership event %s", event)
+                    return
+
+                # Finally, there are some other events that we should not send out
+                # until someone asks for them. They are explicitly flagged as such
+                # with `proactively_send: False`.
                 if not event.internal_metadata.should_proactively_send():
+                    logger.debug(
+                        "Not sending event with proactively_send=false: %s", event
+                    )
                     return

                 destinations: Optional[Set[str]] = None
@@ -419,7 +470,10 @@ class FederationSender(AbstractFederationSender):
                 "federation_sender"
             ).observe((now - ts) / 1000)

-        async def handle_room_events(events: Iterable[EventBase]) -> None:
+        async def handle_room_events(events: List[EventBase]) -> None:
+            logger.debug(
+                "Handling %i events in room %s", len(events), events[0].room_id
+            )
             with Measure(self.clock, "handle_room_events"):
                 for event in events:
                     await handle_event(event)
@@ -438,6 +492,7 @@ class FederationSender(AbstractFederationSender):
                     )
                 )

+            logger.debug("Successfully handled up to %i", next_token)
             await self.store.update_federation_out_pos("events", next_token)

             if events:
@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )

-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
             raise RuntimeError("Transport layer cannot send to itself!")

         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()

-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)

         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,
@@ -481,7 +481,7 @@ class AuthHandler:
         sid = authdict["session"]

         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")

         # If there's no session ID, create a new session.
@@ -164,7 +164,7 @@ class EventHandler:
             event.
         """
         redact_behaviour = (
-            EventRedactBehaviour.AS_IS if show_redacted else EventRedactBehaviour.REDACT
+            EventRedactBehaviour.as_is if show_redacted else EventRedactBehaviour.redact
         )
         event = await self.store.get_event(
             event_id, check_room_id=room_id, redact_behaviour=redact_behaviour
@@ -316,7 +316,7 @@ class FederationHandler:

         events_to_check = await self.store.get_events_as_list(
             event_ids_to_check,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             get_prev_content=False,
         )

@@ -1494,7 +1494,7 @@ class FederationHandler:

         events = await self.store.get_events_as_list(
             batch,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             allow_rejected=True,
         )
         for event in events:
@@ -860,7 +860,7 @@ class FederationEventHandler:
         evs = await self._store.get_events(
             list(state_map.values()),
             get_prev_content=False,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
         )
         event_map.update(evs)

@@ -1407,7 +1407,7 @@ class EventCreationHandler:

         original_event = await self.store.get_event(
             event.redacts,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             get_prev_content=False,
             allow_rejected=False,
             allow_none=True,
@@ -1504,7 +1504,7 @@ class EventCreationHandler:

         original_event = await self.store.get_event(
             event.redacts,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             get_prev_content=False,
             allow_rejected=False,
             allow_none=True,
@@ -966,7 +966,7 @@ class OidcProvider:
                 "Mapping provider does not support de-duplicating Matrix IDs"
             )

-        attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+        attributes = await self._user_mapping_provider.map_user_attributes(
             userinfo, token
         )

synapse/handlers/push_rules.py (new file, 138 lines)
@@ -0,0 +1,138 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import TYPE_CHECKING, List, Optional, Union
+
+import attr
+
+from synapse.api.errors import SynapseError, UnrecognizedRequestError
+from synapse.push.baserules import BASE_RULE_IDS
+from synapse.storage.push_rule import RuleNotFoundException
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class RuleSpec:
+    scope: str
+    template: str
+    rule_id: str
+    attr: Optional[str]
+
+
+class PushRulesHandler:
+    """A class to handle changes in push rules for users."""
+
+    def __init__(self, hs: "HomeServer"):
+        self._notifier = hs.get_notifier()
+        self._main_store = hs.get_datastores().main
+
+    async def set_rule_attr(
+        self, user_id: str, spec: RuleSpec, val: Union[bool, JsonDict]
+    ) -> None:
+        """Set an attribute (enabled or actions) on an existing push rule.
+
+        Notifies listeners (e.g. sync handler) of the change.
+
+        Args:
+            user_id: the user for which to modify the push rule.
+            spec: the spec of the push rule to modify.
+            val: the value to change the attribute to.
+
+        Raises:
+            RuleNotFoundException if the rule being modified doesn't exist.
+            SynapseError(400) if the value is malformed.
+            UnrecognizedRequestError if the attribute to change is unknown.
+            InvalidRuleException if we're trying to change the actions on a rule but
+                the provided actions aren't compliant with the spec.
+        """
+        if spec.attr not in ("enabled", "actions"):
+            # for the sake of potential future expansion, shouldn't report
+            # 404 in the case of an unknown request so check it corresponds to
+            # a known attribute first.
+            raise UnrecognizedRequestError()
+
+        namespaced_rule_id = f"global/{spec.template}/{spec.rule_id}"
+        rule_id = spec.rule_id
+        is_default_rule = rule_id.startswith(".")
+        if is_default_rule:
+            if namespaced_rule_id not in BASE_RULE_IDS:
+                raise RuleNotFoundException("Unknown rule %r" % (namespaced_rule_id,))
+        if spec.attr == "enabled":
+            if isinstance(val, dict) and "enabled" in val:
+                val = val["enabled"]
+            if not isinstance(val, bool):
+                # Legacy fallback
+                # This should *actually* take a dict, but many clients pass
+                # bools directly, so let's not break them.
+                raise SynapseError(400, "Value for 'enabled' must be boolean")
+            await self._main_store.set_push_rule_enabled(
+                user_id, namespaced_rule_id, val, is_default_rule
+            )
+        elif spec.attr == "actions":
+            if not isinstance(val, dict):
+                raise SynapseError(400, "Value must be a dict")
+            actions = val.get("actions")
+            if not isinstance(actions, list):
+                raise SynapseError(400, "Value for 'actions' must be dict")
+            check_actions(actions)
+            rule_id = spec.rule_id
+            is_default_rule = rule_id.startswith(".")
+            if is_default_rule:
+                if namespaced_rule_id not in BASE_RULE_IDS:
+                    raise RuleNotFoundException(
+                        "Unknown rule %r" % (namespaced_rule_id,)
+                    )
+            await self._main_store.set_push_rule_actions(
+                user_id, namespaced_rule_id, actions, is_default_rule
+            )
+        else:
+            raise UnrecognizedRequestError()
+
+        self.notify_user(user_id)
+
+    def notify_user(self, user_id: str) -> None:
+        """Notify listeners about a push rule change.
+
+        Args:
+            user_id: the user ID the change is for.
+        """
+        stream_id = self._main_store.get_max_push_rules_stream_id()
+        self._notifier.on_new_event("push_rules_key", stream_id, users=[user_id])
+
+
+def check_actions(actions: List[Union[str, JsonDict]]) -> None:
+    """Check if the given actions are spec compliant.
+
+    Args:
+        actions: the actions to check.
+
+    Raises:
+        InvalidRuleException if the rules aren't compliant with the spec.
+    """
+    if not isinstance(actions, list):
+        raise InvalidRuleException("No actions found")
+
+    for a in actions:
+        if a in ["notify", "dont_notify", "coalesce"]:
+            pass
+        elif isinstance(a, dict) and "set_tweak" in a:
+            pass
+        else:
+            raise InvalidRuleException("Unrecognised action %s" % a)
+
+
+class InvalidRuleException(Exception):
+    pass
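A quick sketch of how `check_actions` behaves, as hypothetical standalone usage of the new module (the action values are illustrative, per the Matrix spec's action list):

from synapse.handlers.push_rules import InvalidRuleException, check_actions

# Spec-compliant actions pass silently.
check_actions(["notify", {"set_tweak": "highlight", "value": False}])

# Anything unrecognised raises InvalidRuleException.
try:
    check_actions(["shout_loudly"])
except InvalidRuleException as e:
    print(e)  # Unrecognised action shout_loudly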
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import collections.abc
 import logging
 from typing import (
     TYPE_CHECKING,
@@ -24,7 +25,6 @@ from typing import (
 )

 import attr
-from frozendict import frozendict

 from synapse.api.constants import RelationTypes
 from synapse.api.errors import SynapseError
@@ -44,8 +44,6 @@ logger = logging.getLogger(__name__)
 class _ThreadAggregation:
     # The latest event in the thread.
     latest_event: EventBase
-    # The latest edit to the latest event in the thread.
-    latest_edit: Optional[EventBase]
     # The total number of events in the thread.
     count: int
     # True if the current user has sent an event to the thread.
@@ -295,7 +293,7 @@ class RelationsHandler:

         for event_id, summary in summaries.items():
             if summary:
-                thread_count, latest_thread_event, edit = summary
+                thread_count, latest_thread_event = summary

                 # Subtract off the count of any ignored users.
                 for ignored_user in ignored_users:
@@ -340,7 +338,6 @@ class RelationsHandler:

                 results[event_id] = _ThreadAggregation(
                     latest_event=latest_thread_event,
-                    latest_edit=edit,
                     count=thread_count,
                     # If there's a thread summary it must also exist in the
                     # participated dictionary.
@@ -359,8 +356,13 @@ class RelationsHandler:
             user_id: The user requesting the bundled aggregations.

         Returns:
-            A map of event ID to the bundled aggregation for the event. Not all
-            events may have bundled aggregations in the results.
+            A map of event ID to the bundled aggregations for the event.
+
+            Not all requested events may exist in the results (if they don't have
+            bundled aggregations).
+
+            The results may include additional events which are related to the
+            requested events.
         """
         # De-duplicate events by ID to handle the same event requested multiple times.
         #
@@ -369,22 +371,59 @@ class RelationsHandler:
             event.event_id: event for event in events if not event.is_state()
         }

+        # A map of event ID to the relation in that event, if there is one.
+        relations_by_id: Dict[str, str] = {}
+        for event_id, event in events_by_id.items():
+            relates_to = event.content.get("m.relates_to")
+            if isinstance(relates_to, collections.abc.Mapping):
+                relation_type = relates_to.get("rel_type")
+                if isinstance(relation_type, str):
+                    relations_by_id[event_id] = relation_type
+
         # event ID -> bundled aggregation in non-serialized form.
         results: Dict[str, BundledAggregations] = {}

         # Fetch any ignored users of the requesting user.
         ignored_users = await self._main_store.ignored_users(user_id)

+        # Threads are special as the latest event of a thread might cause additional
+        # events to be fetched. Thus, we check those first!
+
+        # Fetch thread summaries (but only for the directly requested events).
+        threads = await self.get_threads_for_events(
+            # It is not valid to start a thread on an event which itself relates to another event.
+            [eid for eid in events_by_id.keys() if eid not in relations_by_id],
+            user_id,
+            ignored_users,
+        )
+        for event_id, thread in threads.items():
+            results.setdefault(event_id, BundledAggregations()).thread = thread
+
+            # If the latest event in a thread is not already being fetched,
+            # add it. This ensures that the bundled aggregations for the
+            # latest thread event is correct.
+            latest_thread_event = thread.latest_event
+            if latest_thread_event and latest_thread_event.event_id not in events_by_id:
+                events_by_id[latest_thread_event.event_id] = latest_thread_event
+                # Keep relations_by_id in sync with events_by_id:
+                #
+                # We know that the latest event in a thread has a thread relation
+                # (as that is what makes it part of the thread).
+                relations_by_id[latest_thread_event.event_id] = RelationTypes.THREAD
+
         # Fetch other relations per event.
         for event in events_by_id.values():
-            # Do not bundle aggregations for an event which represents an edit or an
-            # annotation. It does not make sense for them to have related events.
-            relates_to = event.content.get("m.relates_to")
-            if isinstance(relates_to, (dict, frozendict)):
-                relation_type = relates_to.get("rel_type")
-                if relation_type in (RelationTypes.ANNOTATION, RelationTypes.REPLACE):
-                    continue
+            # An event which is a replacement (ie edit) or annotation (ie, reaction)
+            # may not have any other event related to it.
+            #
+            # XXX This is buggy, see https://github.com/matrix-org/synapse/issues/12566
+            if relations_by_id.get(event.event_id) in (
+                RelationTypes.ANNOTATION,
+                RelationTypes.REPLACE,
+            ):
+                continue

             # Fetch any annotations (ie, reactions) to bundle with this event.
             annotations = await self.get_annotations_for_event(
                 event.event_id, event.room_id, ignored_users=ignored_users
             )
@@ -393,6 +432,7 @@ class RelationsHandler:
                 event.event_id, BundledAggregations()
             ).annotations = {"chunk": annotations}

+            # Fetch any references to bundle with this event.
             references, next_token = await self.get_relations_for_event(
                 event.event_id,
                 event,
@@ -425,10 +465,4 @@ class RelationsHandler:
         for event_id, edit in edits.items():
             results.setdefault(event_id, BundledAggregations()).replace = edit

-        threads = await self.get_threads_for_events(
-            events_by_id.keys(), user_id, ignored_users
-        )
-        for event_id, thread in threads.items():
-            results.setdefault(event_id, BundledAggregations()).thread = thread
-
         return results
@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@ class SearchHandler:

         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )

         results = [
@@ -256,7 +256,9 @@ class RegistrationTokenAuthChecker(UserInteractiveAuthChecker):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.hs = hs
-        self._enabled = bool(hs.config.registration.registration_requires_token)
+        self._enabled = bool(
+            hs.config.registration.registration_requires_token
+        ) or bool(hs.config.registration.enable_registration_token_3pid_bypasss)
         self.store = hs.get_datastores().main

     def is_enabled(self) -> bool:
@@ -43,6 +43,7 @@ from typing_extensions import Protocol
 from zope.interface import implementer

 from twisted.internet import defer, interfaces
+from twisted.internet.defer import CancelledError
 from twisted.python import failure
 from twisted.web import resource
 from twisted.web.server import NOT_DONE_YET, Request
@@ -82,6 +83,14 @@ HTML_ERROR_TEMPLATE = """<!DOCTYPE html>
 </html>
 """

+# A fictional HTTP status code for requests where the client has disconnected and we
+# successfully cancelled the request. Used only for logging purposes. Clients will never
+# observe this code unless cancellations leak across requests or we raise a
+# `CancelledError` ourselves.
+# Analogous to nginx's 499 status code:
+# https://github.com/nginx/nginx/blob/release-1.21.6/src/http/ngx_http_request.h#L128-L134
+HTTP_STATUS_REQUEST_CANCELLED = 499
+

 def return_json_error(f: failure.Failure, request: SynapseRequest) -> None:
     """Sends a JSON error response to clients."""
@@ -93,6 +102,17 @@ def return_json_error(f: failure.Failure, request: SynapseRequest) -> None:
         error_dict = exc.error_dict()

         logger.info("%s SynapseError: %s - %s", request, error_code, exc.msg)
+    elif f.check(CancelledError):
+        error_code = HTTP_STATUS_REQUEST_CANCELLED
+        error_dict = {"error": "Request cancelled", "errcode": Codes.UNKNOWN}
+
+        if not request._disconnected:
+            logger.error(
+                "Got cancellation before client disconnection from %r: %r",
+                request.request_metrics.name,
+                request,
+                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+            )
     else:
         error_code = 500
         error_dict = {"error": "Internal server error", "errcode": Codes.UNKNOWN}
@@ -155,6 +175,16 @@ def return_html_error(
             request,
             exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
         )
+    elif f.check(CancelledError):
+        code = HTTP_STATUS_REQUEST_CANCELLED
+        msg = "Request cancelled"
+
+        if not request._disconnected:
+            logger.error(
+                "Got cancellation before client disconnection when handling request %r",
+                request,
+                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+            )
     else:
         code = HTTPStatus.INTERNAL_SERVER_ERROR
         msg = "Internal server error"
@@ -295,7 +325,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
             if isawaitable(raw_callback_return):
                 callback_return = await raw_callback_return
             else:
-                callback_return = raw_callback_return  # type: ignore
+                callback_return = raw_callback_return

             return callback_return

@@ -469,7 +499,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return

         return callback_return

@@ -683,6 +713,9 @@ def respond_with_json(
     Returns:
         twisted.web.server.NOT_DONE_YET if the request is still active.
     """
+    # The response code must always be set, for logging purposes.
+    request.setResponseCode(code)
+
     # could alternatively use request.notifyFinish() and flip a flag when
     # the Deferred fires, but since the flag is RIGHT THERE it seems like
     # a waste.
@@ -697,7 +730,6 @@ def respond_with_json(
     else:
         encoder = _encode_json_bytes

-    request.setResponseCode(code)
     request.setHeader(b"Content-Type", b"application/json")
     request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")

@@ -728,13 +760,15 @@ def respond_with_json_bytes(
     Returns:
         twisted.web.server.NOT_DONE_YET if the request is still active.
     """
+    # The response code must always be set, for logging purposes.
+    request.setResponseCode(code)
+
     if request._disconnected:
         logger.warning(
             "Not sending response to request %s, already disconnected.", request
         )
         return None

-    request.setResponseCode(code)
     request.setHeader(b"Content-Type", b"application/json")
     request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
     request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
@@ -840,6 +874,9 @@ def respond_with_html_bytes(request: Request, code: int, html_bytes: bytes) -> N
         code: The HTTP response code.
         html_bytes: The HTML bytes to use as the response body.
     """
+    # The response code must always be set, for logging purposes.
+    request.setResponseCode(code)
+
     # could alternatively use request.notifyFinish() and flip a flag when
     # the Deferred fires, but since the flag is RIGHT THERE it seems like
     # a waste.
@@ -849,7 +886,6 @@ def respond_with_html_bytes(request: Request, code: int, html_bytes: bytes) -> N
         )
         return None

-    request.setResponseCode(code)
     request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
     request.setHeader(b"Content-Length", b"%d" % (len(html_bytes),))
@@ -722,6 +722,11 @@ P = ParamSpec("P")
 R = TypeVar("R")


+async def _unwrap_awaitable(awaitable: Awaitable[R]) -> R:
+    """Unwraps an arbitrary awaitable by awaiting it."""
+    return await awaitable
+
+
 @overload
 def preserve_fn(  # type: ignore[misc]
     f: Callable[P, Awaitable[R]],
@@ -802,17 +807,20 @@ def run_in_background(  # type: ignore[misc]
         # by synchronous exceptions, so let's turn them into Failures.
         return defer.fail()

+    # `res` may be a coroutine, `Deferred`, some other kind of awaitable, or a plain
+    # value. Convert it to a `Deferred`.
     if isinstance(res, typing.Coroutine):
+        # Wrap the coroutine in a `Deferred`.
         res = defer.ensureDeferred(res)
-
-    # At this point we should have a Deferred, if not then f was a synchronous
-    # function, wrap it in a Deferred for consistency.
-    if not isinstance(res, defer.Deferred):
-        # `res` is not a `Deferred` and not a `Coroutine`.
-        # There are no other types of `Awaitable`s we expect to encounter in Synapse.
-        assert not isinstance(res, Awaitable)
-
-        return defer.succeed(res)
+    elif isinstance(res, defer.Deferred):
+        pass
+    elif isinstance(res, Awaitable):
+        # `res` is probably some kind of completed awaitable, such as a `DoneAwaitable`
+        # or `Future` from `make_awaitable`.
+        res = defer.ensureDeferred(_unwrap_awaitable(res))
+    else:
+        # `res` is a plain value. Wrap it in a `Deferred`.
+        res = defer.succeed(res)

     if res.called and not res.paused:
         # The function should have maintained the logcontext, so we can
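The effect of the new branch is that `run_in_background` now normalises any kind of awaitable into a `Deferred`, not just coroutines. A rough standalone sketch of the same normalisation, using only the public Twisted API (the `DoneAwaitable` class here is a toy stand-in for the completed awaitables mentioned in the comment, not Synapse code):

from twisted.internet import defer


async def _unwrap(awaitable):
    # Awaiting works for any awaitable, so this adapts it into a coroutine,
    # which ensureDeferred() knows how to wrap.
    return await awaitable


class DoneAwaitable:
    """A toy awaitable that completes immediately with a fixed value."""

    def __init__(self, value):
        self.value = value

    def __await__(self):
        yield from ()
        return self.value


d = defer.ensureDeferred(_unwrap(DoneAwaitable(42)))
d.addCallback(print)  # prints 42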
@@ -82,6 +82,7 @@ from synapse.handlers.auth import (
     ON_LOGGED_OUT_CALLBACK,
     AuthHandler,
 )
+from synapse.handlers.push_rules import RuleSpec, check_actions
 from synapse.http.client import SimpleHttpClient
 from synapse.http.server import (
     DirectServeHtmlResource,
@@ -109,6 +110,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +153,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -193,6 +196,7 @@ class ModuleApi:
         self._clock: Clock = hs.get_clock()
         self._registration_handler = hs.get_registration_handler()
         self._send_email_handler = hs.get_send_email_handler()
+        self._push_rules_handler = hs.get_push_rules_handler()
         self.custom_template_dir = hs.config.server.custom_template_directory

         try:
@@ -1350,6 +1354,68 @@ class ModuleApi:
         """
         await self._store.add_user_bound_threepid(user_id, medium, address, id_server)

+    def check_push_rule_actions(
+        self, actions: List[Union[str, Dict[str, str]]]
+    ) -> None:
+        """Checks if the given push rule actions are valid according to the Matrix
+        specification.
+
+        See https://spec.matrix.org/v1.2/client-server-api/#actions for the list of valid
+        actions.
+
+        Added in Synapse v1.58.0.
+
+        Args:
+            actions: the actions to check.
+
+        Raises:
+            synapse.module_api.errors.InvalidRuleException if the actions are invalid.
+        """
+        check_actions(actions)
+
+    async def set_push_rule_action(
+        self,
+        user_id: str,
+        scope: str,
+        kind: str,
+        rule_id: str,
+        actions: List[Union[str, Dict[str, str]]],
+    ) -> None:
+        """Changes the actions of an existing push rule for the given user.
+
+        See https://spec.matrix.org/v1.2/client-server-api/#push-rules for more
+        information about push rules and their syntax.
+
+        Can only be called on the main process.
+
+        Added in Synapse v1.58.0.
+
+        Args:
+            user_id: the user for which to change the push rule's actions.
+            scope: the push rule's scope, currently only "global" is allowed.
+            kind: the push rule's kind.
+            rule_id: the push rule's identifier.
+            actions: the actions to run when the rule's conditions match.
+
+        Raises:
+            RuntimeError if this method is called on a worker or `scope` is invalid.
+            synapse.module_api.errors.RuleNotFoundException if the rule being modified
+                can't be found.
+            synapse.module_api.errors.InvalidRuleException if the actions are invalid.
+        """
+        if self.worker_app is not None:
+            raise RuntimeError("module tried to change push rule actions on a worker")
+
+        if scope != "global":
+            raise RuntimeError(
+                "invalid scope %s, only 'global' is currently allowed" % scope
+            )
+
+        spec = RuleSpec(scope, kind, rule_id, "actions")
+        await self._push_rules_handler.set_rule_attr(
+            user_id, spec, {"actions": actions}
+        )
+

 class PublicRoomListManager:
     """Contains methods for adding to, removing from and querying whether a room
@@ -1419,7 +1485,7 @@ class AccountDataManager:
                 f"{user_id} is not local to this homeserver; can't access account data for remote users."
             )

-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
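A hypothetical module showing how the `check_push_rule_actions` and `set_push_rule_action` methods added above might be used together (the module class, method, user ID, and actions are illustrative, not from the PR; the base rule `.m.rule.message` is a real underride rule):

from synapse.module_api import ModuleApi
from synapse.module_api.errors import InvalidRuleException, RuleNotFoundException


class QuietHoursModule:
    """Toy module that silences the default message rule for a user."""

    def __init__(self, config: dict, api: ModuleApi):
        self._api = api

    async def silence_messages(self, user_id: str) -> None:
        actions = ["dont_notify"]
        # Validate the actions up front; raises InvalidRuleException if bad.
        self._api.check_push_rule_actions(actions)
        try:
            await self._api.set_push_rule_action(
                user_id=user_id,
                scope="global",
                kind="underride",
                rule_id=".m.rule.message",
                actions=actions,
            )
        except RuleNotFoundException:
            pass  # the rule doesn't exist for this user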
@@ -20,10 +20,14 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.config._base import ConfigError
+from synapse.handlers.push_rules import InvalidRuleException
+from synapse.storage.push_rule import RuleNotFoundException

 __all__ = [
     "InvalidClientCredentialsError",
     "RedirectException",
     "SynapseError",
     "ConfigError",
+    "InvalidRuleException",
+    "RuleNotFoundException",
 ]
@@ -882,9 +882,7 @@ class WhoamiRestServlet(RestServlet):

         response = {
             "user_id": requester.user.to_string(),
-            # MSC: https://github.com/matrix-org/matrix-doc/pull/3069
             # Entered spec in Matrix 1.2
-            "org.matrix.msc3069.is_guest": bool(requester.is_guest),
             "is_guest": bool(requester.is_guest),
         }
@@ -69,9 +69,7 @@ class LoginRestServlet(RestServlet):
     SSO_TYPE = "m.login.sso"
     TOKEN_TYPE = "m.login.token"
     JWT_TYPE = "org.matrix.login.jwt"
-    JWT_TYPE_DEPRECATED = "m.login.jwt"
     APPSERVICE_TYPE = "m.login.application_service"
-    APPSERVICE_TYPE_UNSTABLE = "uk.half-shot.msc2778.login.application_service"
     REFRESH_TOKEN_PARAM = "refresh_token"

     def __init__(self, hs: "HomeServer"):
@@ -126,7 +124,6 @@ class LoginRestServlet(RestServlet):
         flows: List[JsonDict] = []
         if self.jwt_enabled:
             flows.append({"type": LoginRestServlet.JWT_TYPE})
-            flows.append({"type": LoginRestServlet.JWT_TYPE_DEPRECATED})

         if self.cas_enabled:
             # we advertise CAS for backwards compat, though MSC1721 renamed it
@@ -156,7 +153,6 @@ class LoginRestServlet(RestServlet):
         flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types())

         flows.append({"type": LoginRestServlet.APPSERVICE_TYPE})
-        flows.append({"type": LoginRestServlet.APPSERVICE_TYPE_UNSTABLE})

         return 200, {"flows": flows}

@@ -175,10 +171,7 @@ class LoginRestServlet(RestServlet):
             )

         try:
-            if login_submission["type"] in (
-                LoginRestServlet.APPSERVICE_TYPE,
-                LoginRestServlet.APPSERVICE_TYPE_UNSTABLE,
-            ):
+            if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE:
                 appservice = self.auth.get_appservice_by_req(request)

                 if appservice.is_rate_limited():
@@ -191,9 +184,9 @@ class LoginRestServlet(RestServlet):
                     appservice,
                     should_issue_refresh_token=should_issue_refresh_token,
                 )
-            elif self.jwt_enabled and (
-                login_submission["type"] == LoginRestServlet.JWT_TYPE
-                or login_submission["type"] == LoginRestServlet.JWT_TYPE_DEPRECATED
+            elif (
+                self.jwt_enabled
+                and login_submission["type"] == LoginRestServlet.JWT_TYPE
             ):
                 await self._address_ratelimiter.ratelimit(None, request.getClientIP())
                 result = await self._do_jwt_login(
@@ -12,9 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple, Union
-
-import attr
+from typing import TYPE_CHECKING, List, Sequence, Tuple, Union

 from synapse.api.errors import (
     NotFoundError,
@@ -22,6 +20,7 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
+from synapse.handlers.push_rules import InvalidRuleException, RuleSpec, check_actions
 from synapse.http.server import HttpServer
 from synapse.http.servlet import (
     RestServlet,
@@ -29,7 +28,6 @@ from synapse.http.servlet import (
     parse_string,
 )
 from synapse.http.site import SynapseRequest
-from synapse.push.baserules import BASE_RULE_IDS
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP
 from synapse.rest.client._base import client_patterns
@@ -40,14 +38,6 @@ if TYPE_CHECKING:
     from synapse.server import HomeServer


-@attr.s(slots=True, frozen=True, auto_attribs=True)
-class RuleSpec:
-    scope: str
-    template: str
-    rule_id: str
-    attr: Optional[str]
-
-
 class PushRuleRestServlet(RestServlet):
     PATTERNS = client_patterns("/(?P<path>pushrules/.*)$", v1=True)
     SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
@@ -60,6 +50,7 @@ class PushRuleRestServlet(RestServlet):
         self.store = hs.get_datastores().main
         self.notifier = hs.get_notifier()
         self._is_worker = hs.config.worker.worker_app is not None
+        self._push_rules_handler = hs.get_push_rules_handler()

     async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]:
         if self._is_worker:
@@ -81,8 +72,13 @@ class PushRuleRestServlet(RestServlet):
         user_id = requester.user.to_string()

         if spec.attr:
-            await self.set_rule_attr(user_id, spec, content)
-            self.notify_user(user_id)
+            try:
+                await self._push_rules_handler.set_rule_attr(user_id, spec, content)
+            except InvalidRuleException as e:
+                raise SynapseError(400, "Invalid actions: %s" % e)
+            except RuleNotFoundException:
+                raise NotFoundError("Unknown rule")
+
             return 200, {}

         if spec.rule_id.startswith("."):
@@ -98,23 +94,23 @@ class PushRuleRestServlet(RestServlet):

         before = parse_string(request, "before")
         if before:
-            before = _namespaced_rule_id(spec, before)
+            before = f"global/{spec.template}/{before}"

         after = parse_string(request, "after")
         if after:
-            after = _namespaced_rule_id(spec, after)
+            after = f"global/{spec.template}/{after}"

         try:
             await self.store.add_push_rule(
                 user_id=user_id,
-                rule_id=_namespaced_rule_id_from_spec(spec),
+                rule_id=f"global/{spec.template}/{spec.rule_id}",
                 priority_class=priority_class,
                 conditions=conditions,
                 actions=actions,
                 before=before,
                 after=after,
             )
-            self.notify_user(user_id)
+            self._push_rules_handler.notify_user(user_id)
         except InconsistentRuleException as e:
             raise SynapseError(400, str(e))
         except RuleNotFoundException as e:
@@ -133,11 +129,11 @@ class PushRuleRestServlet(RestServlet):
         requester = await self.auth.get_user_by_req(request)
         user_id = requester.user.to_string()

-        namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
+        namespaced_rule_id = f"global/{spec.template}/{spec.rule_id}"

         try:
             await self.store.delete_push_rule(user_id, namespaced_rule_id)
-            self.notify_user(user_id)
+            self._push_rules_handler.notify_user(user_id)
             return 200, {}
         except StoreError as e:
             if e.code == 404:
@@ -172,55 +168,6 @@ class PushRuleRestServlet(RestServlet):
         else:
             raise UnrecognizedRequestError()

-    def notify_user(self, user_id: str) -> None:
-        stream_id = self.store.get_max_push_rules_stream_id()
-        self.notifier.on_new_event("push_rules_key", stream_id, users=[user_id])
-
-    async def set_rule_attr(
-        self, user_id: str, spec: RuleSpec, val: Union[bool, JsonDict]
-    ) -> None:
-        if spec.attr not in ("enabled", "actions"):
-            # for the sake of potential future expansion, shouldn't report
-            # 404 in the case of an unknown request so check it corresponds to
-            # a known attribute first.
-            raise UnrecognizedRequestError()
-
-        namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
-        rule_id = spec.rule_id
-        is_default_rule = rule_id.startswith(".")
-        if is_default_rule:
-            if namespaced_rule_id not in BASE_RULE_IDS:
-                raise NotFoundError("Unknown rule %s" % (namespaced_rule_id,))
-        if spec.attr == "enabled":
-            if isinstance(val, dict) and "enabled" in val:
-                val = val["enabled"]
-            if not isinstance(val, bool):
-                # Legacy fallback
-                # This should *actually* take a dict, but many clients pass
-                # bools directly, so let's not break them.
-                raise SynapseError(400, "Value for 'enabled' must be boolean")
-            await self.store.set_push_rule_enabled(
-                user_id, namespaced_rule_id, val, is_default_rule
-            )
-        elif spec.attr == "actions":
-            if not isinstance(val, dict):
-                raise SynapseError(400, "Value must be a dict")
-            actions = val.get("actions")
-            if not isinstance(actions, list):
-                raise SynapseError(400, "Value for 'actions' must be dict")
-            _check_actions(actions)
-            namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
-            rule_id = spec.rule_id
-            is_default_rule = rule_id.startswith(".")
-            if is_default_rule:
-                if namespaced_rule_id not in BASE_RULE_IDS:
-                    raise SynapseError(404, "Unknown rule %r" % (namespaced_rule_id,))
-            await self.store.set_push_rule_actions(
-                user_id, namespaced_rule_id, actions, is_default_rule
-            )
-        else:
-            raise UnrecognizedRequestError()
-

 def _rule_spec_from_path(path: Sequence[str]) -> RuleSpec:
     """Turn a sequence of path components into a rule spec
@@ -291,24 +238,11 @@ def _rule_tuple_from_request_object(
         raise InvalidRuleException("No actions found")
     actions = req_obj["actions"]

-    _check_actions(actions)
+    check_actions(actions)

     return conditions, actions


-def _check_actions(actions: List[Union[str, JsonDict]]) -> None:
-    if not isinstance(actions, list):
-        raise InvalidRuleException("No actions found")
-
-    for a in actions:
-        if a in ["notify", "dont_notify", "coalesce"]:
-            pass
-        elif isinstance(a, dict) and "set_tweak" in a:
-            pass
-        else:
-            raise InvalidRuleException("Unrecognised action")
-
-
 def _filter_ruleset_with_path(ruleset: JsonDict, path: List[str]) -> JsonDict:
     if path == []:
         raise UnrecognizedRequestError(
@@ -357,17 +291,5 @@ def _priority_class_from_spec(spec: RuleSpec) -> int:
     return pc


-def _namespaced_rule_id_from_spec(spec: RuleSpec) -> str:
-    return _namespaced_rule_id(spec, spec.rule_id)
-
-
-def _namespaced_rule_id(spec: RuleSpec, rule_id: str) -> str:
-    return "global/%s/%s" % (spec.template, rule_id)
-
-
-class InvalidRuleException(Exception):
-    pass
-
-
 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     PushRuleRestServlet(hs).register(http_server)
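For reference, the `set_rule_attr` path above is what backs the client-server endpoint for changing a rule's actions. A hedged illustration of exercising it over HTTP (endpoint shape per the Matrix spec; the homeserver URL, access token, and rule are placeholders):

import json
import urllib.request

# Placeholders - substitute a real homeserver, access token and rule.
url = (
    "https://homeserver.example/_matrix/client/v3"
    "/pushrules/global/underride/.m.rule.message/actions"
)
body = json.dumps({"actions": ["dont_notify"]}).encode("utf-8")

req = urllib.request.Request(url, data=body, method="PUT")
req.add_header("Authorization", "Bearer <access_token>")
req.add_header("Content-Type", "application/json")

with urllib.request.urlopen(req) as resp:
    print(resp.status, resp.read())  # 200 b'{}' on success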
@@ -929,6 +929,10 @@ def _calculate_registration_flows(
         # always let users provide both MSISDN & email
         flows.append([LoginType.MSISDN, LoginType.EMAIL_IDENTITY])

+    # Add a flow that doesn't require any 3pids, if the config requests it.
+    if config.registration.enable_registration_token_3pid_bypasss:
+        flows.append([LoginType.REGISTRATION_TOKEN])
+
     # Prepend m.login.terms to all flows if we're requiring consent
     if config.consent.user_consent_at_registration:
         for flow in flows:
@@ -942,7 +946,8 @@ def _calculate_registration_flows(
     # Prepend registration token to all flows if we're requiring a token
     if config.registration.registration_requires_token:
         for flow in flows:
-            flow.insert(0, LoginType.REGISTRATION_TOKEN)
+            if LoginType.REGISTRATION_TOKEN not in flow:
+                flow.insert(0, LoginType.REGISTRATION_TOKEN)

     return flows
@@ -91,6 +91,7 @@ from synapse.handlers.presence import (
     WorkerPresenceHandler,
 )
 from synapse.handlers.profile import ProfileHandler
+from synapse.handlers.push_rules import PushRulesHandler
 from synapse.handlers.read_marker import ReadMarkerHandler
 from synapse.handlers.receipts import ReceiptsHandler
 from synapse.handlers.register import RegistrationHandler
@@ -810,6 +811,10 @@ class HomeServer(metaclass=abc.ABCMeta):
     def get_account_handler(self) -> AccountHandler:
         return AccountHandler(self)

+    @cache_in_self
+    def get_push_rules_handler(self) -> PushRulesHandler:
+        return PushRulesHandler(self)
+
     @cache_in_self
     def get_outbound_redis_connection(self) -> "ConnectionHandler":
         """
@@ -800,7 +800,7 @@ class StateResolutionStore:

         return self.store.get_events(
             event_ids,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             get_prev_content=False,
             allow_rejected=allow_rejected,
         )
@@ -47,6 +47,7 @@ from synapse.storage.database import (
 )
 from synapse.storage.databases.main.events_worker import EventCacheEntry
 from synapse.storage.databases.main.search import SearchEntry
+from synapse.storage.engines.postgres import PostgresEngine
 from synapse.storage.util.id_generators import AbstractStreamIdGenerator
 from synapse.storage.util.sequence import SequenceGenerator
 from synapse.types import StateMap, get_domain_from_id
@@ -364,6 +365,20 @@ class PersistEventsStore:
         min_stream_order = events_and_contexts[0][0].internal_metadata.stream_ordering
         max_stream_order = events_and_contexts[-1][0].internal_metadata.stream_ordering

+        # We check that the room still exists for events we're trying to
+        # persist. This is to protect against races with deleting a room.
+        #
+        # Annoyingly SQLite doesn't support row level locking.
+        if isinstance(self.database_engine, PostgresEngine):
+            for room_id in {e.room_id for e, _ in events_and_contexts}:
+                txn.execute(
+                    "SELECT room_version FROM rooms WHERE room_id = ? FOR SHARE",
+                    (room_id,),
+                )
+                row = txn.fetchone()
+                if row is None:
+                    raise Exception(f"Room does not exist {room_id}")
+
         # stream orderings should have been assigned by now
         assert min_stream_order
         assert max_stream_order
@@ -14,6 +14,7 @@

 import logging
 import threading
+from enum import Enum, auto
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -30,7 +31,6 @@ from typing import (
 )

 import attr
-from constantly import NamedConstant, Names
 from prometheus_client import Gauge
 from typing_extensions import Literal

@@ -150,14 +150,14 @@ class _EventRow:
     outlier: bool


-class EventRedactBehaviour(Names):
+class EventRedactBehaviour(Enum):
     """
     What to do when retrieving a redacted event from the database.
     """

-    AS_IS = NamedConstant()
-    REDACT = NamedConstant()
-    BLOCK = NamedConstant()
+    as_is = auto()
+    redact = auto()
+    block = auto()


 class EventsWorkerStore(SQLBaseStore):
@@ -327,7 +327,7 @@ class EventsWorkerStore(SQLBaseStore):
     async def get_event(
         self,
         event_id: str,
-        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.REDACT,
+        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,
         get_prev_content: bool = ...,
         allow_rejected: bool = ...,
         allow_none: Literal[False] = ...,
@@ -339,7 +339,7 @@ class EventsWorkerStore(SQLBaseStore):
     async def get_event(
         self,
         event_id: str,
-        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.REDACT,
+        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,
         get_prev_content: bool = ...,
         allow_rejected: bool = ...,
         allow_none: Literal[True] = ...,
@@ -350,7 +350,7 @@ class EventsWorkerStore(SQLBaseStore):
     async def get_event(
         self,
         event_id: str,
-        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.REDACT,
+        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,
         get_prev_content: bool = False,
         allow_rejected: bool = False,
         allow_none: bool = False,
@@ -362,9 +362,9 @@ class EventsWorkerStore(SQLBaseStore):
             event_id: The event_id of the event to fetch

             redact_behaviour: Determine what to do with a redacted event. Possible values:
-                * AS_IS - Return the full event body with no redacted content
-                * REDACT - Return the event but with a redacted body
-                * DISALLOW - Do not return redacted events (behave as per allow_none
+                * as_is - Return the full event body with no redacted content
+                * redact - Return the event but with a redacted body
+                * block - Do not return redacted events (behave as per allow_none
                     if the event is redacted)

             get_prev_content: If True and event is a state event,
@@ -406,7 +406,7 @@ class EventsWorkerStore(SQLBaseStore):
     async def get_events(
         self,
         event_ids: Collection[str],
-        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.REDACT,
+        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,
         get_prev_content: bool = False,
         allow_rejected: bool = False,
     ) -> Dict[str, EventBase]:
@@ -417,9 +417,9 @@ class EventsWorkerStore(SQLBaseStore):

             redact_behaviour: Determine what to do with a redacted event. Possible
                 values:
-                * AS_IS - Return the full event body with no redacted content
-                * REDACT - Return the event but with a redacted body
-                * DISALLOW - Do not return redacted events (omit them from the response)
+                * as_is - Return the full event body with no redacted content
+                * redact - Return the event but with a redacted body
+                * block - Do not return redacted events (omit them from the response)

             get_prev_content: If True and event is a state event,
                 include the previous states content in the unsigned field.
@@ -442,7 +442,7 @@ class EventsWorkerStore(SQLBaseStore):
     async def get_events_as_list(
         self,
         event_ids: Collection[str],
-        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.REDACT,
+        redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,
         get_prev_content: bool = False,
         allow_rejected: bool = False,
     ) -> List[EventBase]:
@@ -455,9 +455,9 @@ class EventsWorkerStore(SQLBaseStore):
             event_ids: The event_ids of the events to fetch

             redact_behaviour: Determine what to do with a redacted event. Possible values:
-                * AS_IS - Return the full event body with no redacted content
-                * REDACT - Return the event but with a redacted body
-                * DISALLOW - Do not return redacted events (omit them from the response)
+                * as_is - Return the full event body with no redacted content
+                * redact - Return the event but with a redacted body
+                * block - Do not return redacted events (omit them from the response)

             get_prev_content: If True and event is a state event,
                 include the previous states content in the unsigned field.
@@ -568,10 +568,10 @@ class EventsWorkerStore(SQLBaseStore):
                 event = entry.event

                 if entry.redacted_event:
-                    if redact_behaviour == EventRedactBehaviour.BLOCK:
+                    if redact_behaviour == EventRedactBehaviour.block:
                         # Skip this event
                         continue
-                    elif redact_behaviour == EventRedactBehaviour.REDACT:
+                    elif redact_behaviour == EventRedactBehaviour.redact:
                         event = entry.redacted_event

                 events.append(event)
@@ -1094,6 +1094,18 @@ class EventsWorkerStore(SQLBaseStore):
             original_ev.internal_metadata.stream_ordering = row.stream_ordering
             original_ev.internal_metadata.outlier = row.outlier

+            # Consistency check: if the content of the event has been modified in the
+            # database, then the calculated event ID will not match the event id in the
+            # database.
+            if original_ev.event_id != event_id:
+                # it's difficult to see what to do here. Pretty much all bets are off
+                # if Synapse cannot rely on the consistency of its database.
+                raise RuntimeError(
+                    f"Database corruption: Event {event_id} in room {d['room_id']} "
+                    f"from the database appears to have been modified (calculated "
+                    f"event id {original_ev.event_id})"
+                )
+
             event_map[event_id] = original_ev

         # finally, we can decide whether each one needs redacting, and build
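Moving from `constantly`'s `NamedConstant` to a standard-library `Enum` with `auto()` drops a dependency without changing behaviour. A minimal sketch of the pattern (standalone, mirroring the shape of `EventRedactBehaviour` rather than importing it):

from enum import Enum, auto


class RedactBehaviour(Enum):
    as_is = auto()
    redact = auto()
    block = auto()


behaviour = RedactBehaviour.redact
# Members compare by identity, just like the NamedConstant values did.
print(behaviour is RedactBehaviour.redact)  # True
print(behaviour == RedactBehaviour.block)   # False
print(behaviour.name)                       # "redact"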
@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
             # is racy.
             # Have resolved to invalidate the whole cache for now and do
             # something about it if and when the perf becomes significant
-            self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+            self._invalidate_all_cache_and_stream(
                 txn, self.user_last_seen_monthly_active
             )
-            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())

         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):

         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)
@@ -324,7 +324,12 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         )

     def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]:
-        # First we fetch all the state groups that should be deleted, before
+        # We *immediately* delete the room from the rooms table. This ensures
+        # that we don't race when persisting events (as that transaction checks
+        # that the room exists).
+        txn.execute("DELETE FROM rooms WHERE room_id = ?", (room_id,))
+
+        # Next, we fetch all the state groups that should be deleted, before
         # we delete that information.
         txn.execute(
             """
@@ -403,7 +408,6 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         "room_stats_state",
         "room_stats_current",
         "room_stats_earliest_token",
-        "rooms",
         "stream_ordering_to_exterm",
         "users_in_public_rooms",
         "users_who_share_private_rooms",
@@ -16,7 +16,7 @@ import abc
 import logging
 from typing import TYPE_CHECKING, Dict, List, Tuple, Union

-from synapse.api.errors import NotFoundError, StoreError
+from synapse.api.errors import StoreError
 from synapse.push.baserules import list_with_base_rules
 from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
 from synapse.storage._base import SQLBaseStore, db_to_json
@@ -618,7 +618,7 @@ class PushRuleStore(PushRulesWorkerStore):
                 are always stored in the database `push_rules` table).

         Raises:
-            NotFoundError if the rule does not exist.
+            RuleNotFoundException if the rule does not exist.
         """
         async with self._push_rules_stream_id_gen.get_next() as stream_id:
             event_stream_ordering = self._stream_id_gen.get_current_token()
@@ -668,8 +668,7 @@ class PushRuleStore(PushRulesWorkerStore):
             )
             txn.execute(sql, (user_id, rule_id))
             if txn.fetchone() is None:
-                # needed to set NOT_FOUND code.
-                raise NotFoundError("Push rule does not exist.")
+                raise RuleNotFoundException("Push rule does not exist.")

         self.db_pool.simple_upsert_txn(
             txn,
@@ -698,9 +697,6 @@ class PushRuleStore(PushRulesWorkerStore):
         """
         Sets the `actions` state of a push rule.

-        Will throw NotFoundError if the rule does not exist; the Code for this
-        is NOT_FOUND.
-
         Args:
             user_id: the user ID of the user who wishes to enable/disable the rule
                 e.g. '@tina:example.org'
@@ -712,6 +708,9 @@ class PushRuleStore(PushRulesWorkerStore):
             is_default_rule: True if and only if this is a server-default rule.
                 This skips the check for existence (as only user-created rules
                 are always stored in the database `push_rules` table).
+
+        Raises:
+            RuleNotFoundException if the rule does not exist.
         """
         actions_json = json_encoder.encode(actions)

@@ -744,7 +743,7 @@ class PushRuleStore(PushRulesWorkerStore):
         except StoreError as serr:
             if serr.code == 404:
                 # this sets the NOT_FOUND error Code
-                raise NotFoundError("Push rule does not exist")
+                raise RuleNotFoundException("Push rule does not exist")
             else:
                 raise

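Replacing `NotFoundError` (an HTTP-flavoured error) with `RuleNotFoundException` keeps the storage layer free of HTTP concerns; the translation to a 404 then happens once, at the API boundary. A hedged sketch of that layering (the handler shape and error body are illustrative):

from typing import Any, Dict, Tuple


class RuleNotFoundException(Exception):
    """Raised by the storage layer when a push rule does not exist."""


def storage_delete_rule(rules: Dict[str, Any], rule_id: str) -> None:
    # Storage raises a domain-specific exception, with no HTTP knowledge.
    if rule_id not in rules:
        raise RuleNotFoundException("Push rule does not exist")
    del rules[rule_id]


def rest_delete_rule(rules: Dict[str, Any], rule_id: str) -> Tuple[int, Dict[str, Any]]:
    # The REST layer translates the domain exception into an HTTP error,
    # keeping status codes out of the storage layer.
    try:
        storage_delete_rule(rules, rule_id)
    except RuleNotFoundException:
        return 404, {"errcode": "M_NOT_FOUND", "error": "No such rule"}
    return 200, {}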
@@ -122,10 +122,21 @@ class ReceiptsWorkerStore(SQLBaseStore):
         receipts = await self.get_receipts_for_room(room_id, ReceiptTypes.READ)
         return {r["user_id"] for r in receipts}

-    @cached(num_args=2)
+    @cached()
     async def get_receipts_for_room(
         self, room_id: str, receipt_type: str
     ) -> List[Dict[str, Any]]:
+        """
+        Fetch the event IDs for the latest receipt for all users in a room with the given receipt type.
+
+        Args:
+            room_id: The room ID to fetch the receipt for.
+            receipt_type: The receipt type to fetch.
+
+        Returns:
+            A list of dictionaries, one for each user ID. Each dictionary
+            contains a user ID and the event ID of that user's latest receipt.
+        """
         return await self.db_pool.simple_select_list(
             table="receipts_linearized",
             keyvalues={"room_id": room_id, "receipt_type": receipt_type},
@@ -133,10 +144,21 @@ class ReceiptsWorkerStore(SQLBaseStore):
             desc="get_receipts_for_room",
         )

-    @cached(num_args=3)
+    @cached()
     async def get_last_receipt_event_id_for_user(
         self, user_id: str, room_id: str, receipt_type: str
     ) -> Optional[str]:
+        """
+        Fetch the event ID for the latest receipt in a room with the given receipt type.
+
+        Args:
+            user_id: The user to fetch receipts for.
+            room_id: The room ID to fetch the receipt for.
+            receipt_type: The receipt type to fetch.
+
+        Returns:
+            The event ID of the latest receipt, if one exists; otherwise `None`.
+        """
         return await self.db_pool.simple_select_one_onecol(
             table="receipts_linearized",
             keyvalues={
@@ -149,10 +171,23 @@ class ReceiptsWorkerStore(SQLBaseStore):
             allow_none=True,
         )

-    @cached(num_args=2)
+    @cached()
     async def get_receipts_for_user(
         self, user_id: str, receipt_type: str
     ) -> Dict[str, str]:
+        """
+        Fetch the event IDs for the latest receipts sent by the given user.
+
+        Args:
+            user_id: The user to fetch receipts for.
+            receipt_type: The receipt type to fetch.
+
+        Returns:
+            A map of room ID to the event ID of the latest receipt for that room.
+
+            If the user has not sent a receipt to a room then it will not appear
+            in the returned dictionary.
+        """
         rows = await self.db_pool.simple_select_list(
             table="receipts_linearized",
             keyvalues={"user_id": user_id, "receipt_type": receipt_type},
@@ -165,6 +200,17 @@ class ReceiptsWorkerStore(SQLBaseStore):
     async def get_receipts_for_user_with_orderings(
         self, user_id: str, receipt_type: str
     ) -> JsonDict:
+        """
+        Fetch receipts for all rooms that the given user is joined to.
+
+        Args:
+            user_id: The user to fetch receipts for.
+            receipt_type: The receipt type to fetch.
+
+        Returns:
+            A map of room ID to the latest receipt information.
+        """
+
         def f(txn: LoggingTransaction) -> List[Tuple[str, str, int, int]]:
             sql = (
                 "SELECT rl.room_id, rl.event_id,"
@@ -241,7 +287,7 @@ class ReceiptsWorkerStore(SQLBaseStore):

         return await self._get_linearized_receipts_for_room(room_id, to_key, from_key)

-    @cached(num_args=3, tree=True)
+    @cached(tree=True)
     async def _get_linearized_receipts_for_room(
         self, room_id: str, to_key: int, from_key: Optional[int] = None
     ) -> List[JsonDict]:
@@ -541,7 +587,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
         data: JsonDict,
         stream_id: int,
     ) -> Optional[int]:
-        """Inserts a read-receipt into the database if it's newer than the current RR
+        """Inserts a receipt into the database if it's newer than the current one.

         Returns:
             None if the RR is older than the current RR
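Dropping the explicit `num_args=...` in these hunks works because Synapse's `@cached` decorator can derive the cache key from the wrapped function's own signature. A toy sketch of signature-based key inference (an illustration of the idea, not Synapse's implementation):

import inspect
from functools import wraps
from typing import Any, Callable, Dict, Tuple


def cached() -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    # Toy decorator: derive the cache key from the wrapped function's own
    # signature instead of requiring an explicit num_args count.
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        params = list(inspect.signature(func).parameters)
        num_args = len(params) - (1 if params and params[0] == "self" else 0)
        cache: Dict[Tuple[Any, ...], Any] = {}

        @wraps(func)
        def wrapper(*args: Any) -> Any:
            key = args[-num_args:] if num_args else ()
            if key not in cache:
                cache[key] = func(*args)
            return cache[key]

        return wrapper

    return decorator

Applied to a two-argument method such as get_receipts_for_room(self, room_id, receipt_type), this keys on both arguments automatically, which is why the manual num_args=2 becomes redundant.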
@@ -445,8 +445,8 @@ class RelationsWorkerStore(SQLBaseStore):
     @cachedList(cached_method_name="get_thread_summary", list_name="event_ids")
     async def get_thread_summaries(
         self, event_ids: Collection[str]
-    ) -> Dict[str, Optional[Tuple[int, EventBase, Optional[EventBase]]]]:
-        """Get the number of threaded replies, the latest reply (if any), and the latest edit for that reply for the given event.
+    ) -> Dict[str, Optional[Tuple[int, EventBase]]]:
+        """Get the number of threaded replies and the latest reply (if any) for the given events.

         Args:
             event_ids: Summarize the thread related to this event ID.
@@ -458,7 +458,6 @@ class RelationsWorkerStore(SQLBaseStore):
             Each summary is a tuple of:
                 The number of events in the thread.
                 The most recent event in the thread.
-                The most recent edit to the most recent event in the thread, if applicable.
         """

         def _get_thread_summaries_txn(
@@ -544,9 +543,6 @@ class RelationsWorkerStore(SQLBaseStore):

         latest_events = await self.get_events(latest_event_ids.values())  # type: ignore[attr-defined]

-        # Check to see if any of those events are edited.
-        latest_edits = await self.get_applicable_edits(latest_event_ids.values())
-
         # Map to the event IDs to the thread summary.
         #
         # There might not be a summary due to there not being a thread or
@@ -557,8 +553,7 @@ class RelationsWorkerStore(SQLBaseStore):

             summary = None
             if latest_event:
-                latest_edit = latest_edits.get(latest_event_id)
-                summary = (counts[parent_event_id], latest_event, latest_edit)
+                summary = (counts[parent_event_id], latest_event)
             summaries[parent_event_id] = summary

         return summaries
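These hunks change the per-thread summary from a three-tuple (count, latest event, latest edit) back to a two-tuple (count, latest event), so consumers unpack two fields. A small sketch of consuming the reverted shape (event ID strings stand in here for Synapse's EventBase objects):

from typing import Dict, Optional, Tuple

# After the revert a summary is (reply count, latest event); there is no
# "latest edit" member any more.
ThreadSummary = Tuple[int, str]


def render_summary(summaries: Dict[str, Optional[ThreadSummary]], parent_id: str) -> str:
    summary = summaries.get(parent_id)
    if summary is None:
        # No thread (or no events in it) for this parent event.
        return "no thread"
    count, latest_event_id = summary  # two fields, not three
    return f"{count} replies, latest: {latest_event_id}"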
@@ -494,11 +494,11 @@ class SearchStore(SearchBackgroundUpdateStore):

         results = list(filter(lambda row: row["room_id"] in room_ids, results))

-        # We set redact_behaviour to BLOCK here to prevent redacted events being returned in
+        # We set redact_behaviour to block here to prevent redacted events being returned in
         # search results (which is a data leak)
         events = await self.get_events_as_list(  # type: ignore[attr-defined]
             [r["event_id"] for r in results],
-            redact_behaviour=EventRedactBehaviour.BLOCK,
+            redact_behaviour=EventRedactBehaviour.block,
         )

         event_map = {ev.event_id: ev for ev in events}
@@ -652,11 +652,11 @@ class SearchStore(SearchBackgroundUpdateStore):

         results = list(filter(lambda row: row["room_id"] in room_ids, results))

-        # We set redact_behaviour to BLOCK here to prevent redacted events being returned in
+        # We set redact_behaviour to block here to prevent redacted events being returned in
         # search results (which is a data leak)
         events = await self.get_events_as_list(  # type: ignore[attr-defined]
             [r["event_id"] for r in results],
-            redact_behaviour=EventRedactBehaviour.BLOCK,
+            redact_behaviour=EventRedactBehaviour.block,
         )

         event_map = {ev.event_id: ev for ev in events}
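The `.BLOCK` to `.block` renames here (and `.AS_IS` to `.as_is` below) track `EventRedactBehaviour` becoming an enum with lowercased member names; call sites change mechanically while behaviour stays the same. A hedged before/after sketch of that kind of migration (the member values are illustrative):

from enum import Enum, auto


class EventRedactBehaviourOld:
    # Plain class of constants: no type safety, upper-case names.
    AS_IS = 1
    REDACT = 2
    BLOCK = 3


class EventRedactBehaviour(Enum):
    # Enum members: call sites become EventRedactBehaviour.block, and a
    # type checker can verify that only valid members are passed around.
    as_is = auto()
    redact = auto()
    block = auto()


def fetch(redact_behaviour: EventRedactBehaviour) -> str:
    if redact_behaviour is EventRedactBehaviour.block:
        return "redacted events are dropped entirely"
    return "other behaviour"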
@@ -48,7 +48,7 @@ class SignatureWorkerStore(EventsWorkerStore):
         """
         events = await self.get_events(
             event_ids,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
             allow_rejected=True,
         )

@@ -12,11 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import collections.abc
 import logging
 from typing import TYPE_CHECKING, Collection, Dict, Iterable, Optional, Set, Tuple

-from frozendict import frozendict
-
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import NotFoundError, UnsupportedRoomVersionError
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
@@ -160,7 +159,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
         predecessor = create_event.content.get("predecessor", None)

         # Ensure the key is a dictionary
-        if not isinstance(predecessor, (dict, frozendict)):
+        if not isinstance(predecessor, collections.abc.Mapping):
             return None

         # The keys must be strings since the data is JSON.
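Switching from `isinstance(x, (dict, frozendict))` to `isinstance(x, collections.abc.Mapping)` works because frozendict is registered as a Mapping, so the abstract check covers both types (and any future mapping) without importing frozendict at the call site. A quick illustration (frozendict is a third-party package, used here only to demonstrate the point):

import collections.abc

from frozendict import frozendict

for candidate in ({"a": 1}, frozendict({"a": 1}), ["not", "a", "mapping"]):
    # Both dict and frozendict pass the abstract Mapping check; lists do not.
    print(type(candidate).__name__, isinstance(candidate, collections.abc.Mapping))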
@@ -943,7 +943,7 @@ class EventsPersistenceStorage:
         dropped_events = await self.main_store.get_events(
             dropped_extrems,
             allow_rejected=True,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
         )

         new_senders = {get_domain_from_id(e.sender) for e, _ in events_context}
@@ -974,7 +974,7 @@ class EventsPersistenceStorage:
         prev_events = await self.main_store.get_events(
             new_events,
             allow_rejected=True,
-            redact_behaviour=EventRedactBehaviour.AS_IS,
+            redact_behaviour=EventRedactBehaviour.as_is,
        )
         events_to_check = prev_events.values()

@@ -501,11 +501,11 @@ def _upgrade_existing_database(

             if hasattr(module, "run_create"):
                 logger.info("Running %s:run_create", relative_path)
-                module.run_create(cur, database_engine)  # type: ignore
+                module.run_create(cur, database_engine)

             if not is_empty and hasattr(module, "run_upgrade"):
                 logger.info("Running %s:run_upgrade", relative_path)
-                module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                module.run_upgrade(cur, database_engine, config=config)
         elif ext == ".pyc" or file_name == "__pycache__":
             # Sometimes .pyc files turn up anyway even though we've
             # disabled their generation; e.g. from distribution package
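The `run_create` / `run_upgrade` calls above use an optional-hook pattern: each schema-delta module may define either function, and the runner invokes whatever is present. A minimal sketch of the same pattern (the hook names mirror the diff; the runner shown is generic Python, not Synapse's loader):

import types
from typing import Any


def run_module_hooks(
    module: types.ModuleType, cur: Any, database_engine: Any, is_empty: bool
) -> None:
    # Hooks are optional: only call the ones the module actually defines.
    if hasattr(module, "run_create"):
        module.run_create(cur, database_engine)
    if not is_empty and hasattr(module, "run_upgrade"):
        module.run_upgrade(cur, database_engine)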
@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
             self._metrics.inc_hits()
             return e.value, e.expiry_time, e.ttl

-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache

         If key is in the cache, remove it and return its value, else return default.
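The `SENTINEL` default above is the classic "no default supplied" marker: `None` can be a legitimate cached value, so a private unique object distinguishes "caller passed no default" from "caller passed None". A small sketch of the pattern; annotating the sentinel as Any is one common way to let it stand in for any parameter type without a `# type: ignore` on the signature (this is an assumption about the fix, not a quote of Synapse's code):

from typing import Any, Dict

# Any-typed sentinel: a unique object that no caller can pass by accident.
SENTINEL: Any = object()


def pop(cache: Dict[str, Any], key: str, default: Any = SENTINEL) -> Any:
    if key in cache:
        return cache.pop(key)
    if default is SENTINEL:
        # Distinguish "no default given" from "default=None".
        raise KeyError(key)
    return default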
@@ -54,16 +54,10 @@ class DependencyException(Exception):


 DEV_EXTRAS = {"lint", "mypy", "test", "dev"}
-try:
-    RUNTIME_EXTRAS = (
-        set(metadata.metadata(DISTRIBUTION_NAME).get_all("Provides-Extra")) - DEV_EXTRAS
-    )
-    VERSION = metadata.version(DISTRIBUTION_NAME)
-except Exception as e:
-    raise RuntimeError(
-        "Unable to read Synapse's package installation metadata. "
-        "This may be a problem with how Synapse has been packaged."
-    ) from e
+RUNTIME_EXTRAS = (
+    set(metadata.metadata(DISTRIBUTION_NAME).get_all("Provides-Extra")) - DEV_EXTRAS
+)
+VERSION = metadata.version(DISTRIBUTION_NAME)


 def _is_dev_dependency(req: Requirement) -> bool:
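The `metadata.metadata(...).get_all("Provides-Extra")` and `metadata.version(...)` calls read the installed package's metadata at runtime; dropping the try/except means a broken installation now fails with the underlying exception rather than a wrapped RuntimeError. A small standalone sketch of reading the same fields, querying an arbitrary installed distribution (`pip` here is just a convenient demo target):

from importlib import metadata

DISTRIBUTION_NAME = "pip"  # any installed distribution works for the demo

version = metadata.version(DISTRIBUTION_NAME)
# get_all returns None when the field is absent, hence the `or []`.
extras = set(metadata.metadata(DISTRIBUTION_NAME).get_all("Provides-Extra") or [])
print(DISTRIBUTION_NAME, version, sorted(extras))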
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import collections.abc
 from typing import Any

 from frozendict import frozendict
@@ -35,7 +36,7 @@ def freeze(o: Any) -> Any:


 def unfreeze(o: Any) -> Any:
-    if isinstance(o, (dict, frozendict)):
+    if isinstance(o, collections.abc.Mapping):
         return {k: unfreeze(v) for k, v in o.items()}

     if isinstance(o, (bytes, str)):
Some files were not shown because too many files have changed in this diff.