Pydantic v2 (#19071)

Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Co-authored-by: Andrew Morgan <andrew@amorgan.xyz>
Commit 3595ff921f (parent 300c5558ab)
Authored by V02460 on 2025-10-31 10:22:22 +01:00, committed by GitHub
33 changed files with 422 additions and 1005 deletions


@@ -207,26 +207,6 @@ jobs:
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-pydantic:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
poetry-version: "2.1.1"
extras: "all"
- run: poetry run scripts-dev/check_pydantic_models.py
lint-clippy:
runs-on: ubuntu-latest
needs: changes
@@ -341,7 +321,6 @@ jobs:
- lint-mypy
- lint-crlf
- lint-newsfile
- lint-pydantic
- check-sampleconfig
- check-schema-delta
- check-lockfile
@@ -363,7 +342,6 @@ jobs:
lint
lint-mypy
lint-newsfile
lint-pydantic
lint-clippy
lint-clippy-nightly
lint-rust

changelog.d/19071.misc (new file, 1 line)

@@ -0,0 +1 @@
Update pydantic to v2.
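For context, the bulk of such a migration is moving call sites from pydantic v1's API to the v2 equivalents. A minimal sketch of the kind of change involved (illustrative only, not code from this commit; the LoginBody model and its fields are made up):

from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr, ValidationError

class LoginBody(BaseModel):
    # Hypothetical request-body model, for illustration only.
    # pydantic v1 used a nested `class Config`; v2 uses `model_config = ConfigDict(...)`.
    model_config = ConfigDict(strict=True, extra="forbid")

    user_id: StrictStr
    device_id: StrictStr
    refresh_token_lifetime_ms: StrictInt

# v1: LoginBody.parse_obj(data) / LoginBody.parse_raw(raw)
# v2: LoginBody.model_validate(data) / LoginBody.model_validate_json(raw)
try:
    body = LoginBody.model_validate(
        {"user_id": "@alice:example.org", "device_id": "ABC", "refresh_token_lifetime_ms": 60000}
    )
except ValidationError as exc:
    print(exc.errors())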

poetry.lock (generated, 294 lines changed)

@@ -6,7 +6,7 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
groups = ["main"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
files = [
{file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"},
{file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"},
@@ -444,7 +444,7 @@ description = "XML bomb protection for Python stdlib modules"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -469,7 +469,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
{file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
@@ -519,7 +519,7 @@ description = "Python wrapper for hiredis"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"},
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"},
@@ -842,7 +842,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -980,7 +980,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
{file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
@@ -996,7 +996,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"url-preview\""
markers = "extra == \"url-preview\" or extra == \"all\""
files = [
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"},
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"},
@@ -1283,7 +1283,7 @@ description = "An LDAP3 auth provider for Synapse"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
{file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
@@ -1525,7 +1525,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
@@ -1731,7 +1731,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"postgres\""
markers = "extra == \"postgres\" or extra == \"all\""
files = [
{file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
{file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},
@@ -1739,7 +1739,6 @@ files = [
{file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"},
{file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"},
{file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"},
{file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"},
{file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"},
{file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"},
{file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},
@@ -1752,7 +1751,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
]
@@ -1768,7 +1767,7 @@ description = "A Simple library to enable psycopg2 compatability"
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
]
@@ -1817,21 +1816,21 @@ files = [
[[package]]
name = "pydantic"
version = "2.11.10"
version = "2.12.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
groups = ["main"]
files = [
{file = "pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a"},
{file = "pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423"},
{file = "pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf"},
{file = "pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74"},
]
[package.dependencies]
annotated-types = ">=0.6.0"
pydantic-core = "2.33.2"
typing-extensions = ">=4.12.2"
typing-inspection = ">=0.4.0"
pydantic-core = "2.41.4"
typing-extensions = ">=4.14.1"
typing-inspection = ">=0.4.2"
[package.extras]
email = ["email-validator (>=2.0.0)"]
@@ -1839,115 +1838,133 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows
[[package]]
name = "pydantic-core"
version = "2.33.2"
version = "2.41.4"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
groups = ["main"]
files = [
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"},
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"},
{file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"},
{file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"},
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"},
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"},
{file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"},
{file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"},
{file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"},
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"},
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"},
{file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"},
{file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"},
{file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"},
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"},
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"},
{file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"},
{file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"},
{file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"},
{file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"},
{file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"},
{file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"},
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"},
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"},
{file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"},
{file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"},
{file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"},
{file = "pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e"},
{file = "pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b"},
{file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d"},
{file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700"},
{file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6"},
{file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9"},
{file = "pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57"},
{file = "pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc"},
{file = "pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80"},
{file = "pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8"},
{file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265"},
{file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c"},
{file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a"},
{file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e"},
{file = "pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03"},
{file = "pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e"},
{file = "pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db"},
{file = "pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887"},
{file = "pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47"},
{file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970"},
{file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed"},
{file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8"},
{file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431"},
{file = "pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd"},
{file = "pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff"},
{file = "pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8"},
{file = "pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746"},
{file = "pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84"},
{file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d"},
{file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d"},
{file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2"},
{file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab"},
{file = "pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c"},
{file = "pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4"},
{file = "pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564"},
{file = "pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4"},
{file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2"},
{file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf"},
{file = "pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2"},
{file = "pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89"},
{file = "pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1"},
{file = "pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12"},
{file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d"},
{file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad"},
{file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a"},
{file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025"},
{file = "pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e"},
{file = "pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894"},
{file = "pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d"},
{file = "pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da"},
{file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e"},
{file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa"},
{file = "pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d"},
{file = "pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0"},
{file = "pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062"},
{file = "pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891"},
{file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb"},
{file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514"},
{file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005"},
{file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8"},
{file = "pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb"},
{file = "pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332"},
{file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b"},
{file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42"},
{file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee"},
{file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c"},
{file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537"},
{file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94"},
{file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c"},
{file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a"},
{file = "pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308"},
{file = "pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f"},
{file = "pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5"},
]
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
typing-extensions = ">=4.14.1"
[[package]]
name = "pygithub"
@@ -2027,7 +2044,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"cache-memory\""
markers = "extra == \"cache-memory\" or extra == \"all\""
files = [
{file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
{file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
@@ -2087,7 +2104,7 @@ description = "Python implementation of SAML Version 2 Standard"
optional = true
python-versions = ">=3.9,<4.0"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
{file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
@@ -2112,7 +2129,7 @@ description = "Extensions to the standard Python datetime module"
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -2140,7 +2157,7 @@ description = "World timezone definitions, modern and historical"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
@@ -2526,7 +2543,7 @@ description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"sentry\""
markers = "extra == \"sentry\" or extra == \"all\""
files = [
{file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"},
{file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"},
@@ -2714,7 +2731,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
@@ -2730,7 +2747,7 @@ description = "Python bindings for the Apache Thrift RPC system"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
]
@@ -2784,6 +2801,7 @@ files = [
{file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
{file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]
markers = {main = "python_version < \"3.14\""}
[[package]]
name = "tornado"
@@ -2792,7 +2810,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
{file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
@@ -2926,7 +2944,7 @@ description = "non-blocking redis client for python"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
{file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
@@ -3110,14 +3128,14 @@ files = [
[[package]]
name = "typing-inspection"
version = "0.4.0"
version = "0.4.2"
description = "Runtime typing introspection tools"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
groups = ["main"]
files = [
{file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"},
{file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"},
{file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"},
{file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"},
]
[package.dependencies]
@@ -3172,7 +3190,7 @@ description = "An XML Schema validator and decoder"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
{file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
@@ -3316,4 +3334,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = "^3.10.0"
content-hash = "0122c5aa55099678f2ba5094ec393ebd814def15213388b33e5f1d7760392ffc"
content-hash = "363f8059c998566788b0465c338a3a8aaa56d1e61cc347f2473b687ff34f2a8d"


@@ -220,8 +220,8 @@ netaddr = ">=0.7.18"
Jinja2 = ">=3.0"
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
bleach = ">=3.2.0"
# We use `assert_never`, which was added in `typing-extensions` 4.1.
typing-extensions = ">=4.1"
# pydantic 2.12 depends on typing-extensions>=4.14.1
typing-extensions = ">=4.14.1"
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
cryptography = ">=3.4.7"
@@ -230,9 +230,10 @@ ijson = ">=3.1.4"
matrix-common = "^1.3.0"
# We need packaging.version.Version(...).major, which was added in 20.0.
packaging = ">=20.0"
# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
# See https://github.com/matrix-org/synapse/issues/15858
pydantic = ">=1.7.4, <3"
pydantic = [
{ version = "~=2.8", python = "<3.14" },
{ version = "~=2.12", python = ">=3.14" },
]
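The two version constraints above pick a pydantic range per interpreter version. A rough sketch of how they resolve, using the packaging library the project already depends on (the allowed_pydantic helper is hypothetical, not part of this commit):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

def allowed_pydantic(python_version: str, pydantic_version: str) -> bool:
    # Mirrors the constraint above: "~=2.8" on Python < 3.14, "~=2.12" on Python >= 3.14.
    spec = SpecifierSet("~=2.8") if Version(python_version) < Version("3.14") else SpecifierSet("~=2.12")
    return Version(pydantic_version) in spec

assert allowed_pydantic("3.12", "2.11.10")       # older interpreters may keep a pre-2.12 release
assert not allowed_pydantic("3.14", "2.11.10")   # Python 3.14 requires the 2.12 series
assert allowed_pydantic("3.14", "2.12.3")        # matches the version pinned in poetry.lock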
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
@@ -335,8 +336,6 @@ all = [
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.10"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
# Typechecking
lxml-stubs = ">=0.4.0"


@@ -1,474 +0,0 @@
#! /usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.
Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See
https://github.com/pydantic/pydantic/issues/1098
https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode
Until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
Any,
Callable,
Generator,
TypeVar,
)
from parameterized import parameterized
from typing_extensions import ParamSpec
from synapse._pydantic_compat import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
get_args,
)
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
constr,
conbytes,
conint,
confloat,
]
TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
str,
bytes,
int,
float,
bool,
]
P = ParamSpec("P")
R = TypeVar("R")
class ModelCheckerException(Exception):
"""Dummy exception. Allows us to detect unwanted types during a module import."""
class MissingStrictInConstrainedTypeException(ModelCheckerException):
factory_name: str
def __init__(self, factory_name: str):
self.factory_name = factory_name
class FieldHasUnwantedTypeException(ModelCheckerException):
message: str
def __init__(self, message: str):
self.message = message
def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
"""We patch `constr` and friends with wrappers that enforce strict=True."""
@functools.wraps(factory)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
if "strict" not in kwargs:
raise MissingStrictInConstrainedTypeException(factory.__name__)
if not kwargs["strict"]:
raise MissingStrictInConstrainedTypeException(factory.__name__)
return factory(*args, **kwargs)
return wrapper
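(A quick illustration of what the wrapper enforces, using the names defined in this script; the snippet itself is not part of the original file:)

checked_constr = make_wrapper(constr)

checked_constr(strict=True, max_length=255)   # accepted: strict=True is present
try:
    checked_constr(max_length=255)            # flagged: strict=True is missing
except MissingStrictInConstrainedTypeException as exc:
    print(f"missing strict=True in {exc.factory_name}()")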
def field_type_unwanted(type_: Any) -> bool:
"""Very rough attempt to detect if a type is unwanted as a Pydantic annotation.
At present, we exclude types which will coerce, or any generic type involving types
which will coerce."""
logger.debug("Is %s unwanted?")
if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
logger.debug("yes")
return True
logger.debug("Maybe. Subargs are %s", get_args(type_))
rv = any(field_type_unwanted(t) for t in get_args(type_))
logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
return rv
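(A few concrete cases, matching the coercion list above; OpaqueId is a made-up stand-in for a non-coercible custom class:)

class OpaqueId:
    # Made-up stand-in for a custom type that pydantic will not coerce to.
    pass

assert field_type_unwanted(int) is True                  # plain int: pydantic will coerce "100" -> 100
assert field_type_unwanted(list[str]) is True            # generic over a coercible type
assert field_type_unwanted(OpaqueId) is False
assert field_type_unwanted(dict[str, OpaqueId]) is True  # the str key is enough to flag it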
class PatchedBaseModel(PydanticBaseModel):
"""A patched version of BaseModel that inspects fields after models are defined.
We complain loudly if we see an unwanted type.
Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
"""
@classmethod
def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
for field in cls.__fields__.values():
# Note that field.type_ and field.outer_type are computed based on the
# annotation type, see pydantic.fields.ModelField._type_analysis
if field_type_unwanted(field.outer_type_):
# TODO: this only reports the first bad field. Can we find all bad ones
# and report them all?
raise FieldHasUnwantedTypeException(
f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
f"with unwanted type `{field.outer_type_}`"
)
@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
"""Patch pydantic with our snooping versions of BaseModel and the con* functions.
If the snooping functions see something they don't like, they'll raise a
ModelCheckerException instance.
"""
with contextlib.ExitStack() as patches:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
patch_basemodel = unittest.mock.patch(
"synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
patch = unittest.mock.patch(
f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
)
patches.enter_context(patch)
yield
def format_model_checker_exception(e: ModelCheckerException) -> str:
"""Work out which line of code caused e. Format the line in a human-friendly way."""
# TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
# patches of constr() etc, and instead inspect fields to look for ConstrainedStr
# with strict=False? There is some difficulty with the inheritance hierarchy
# because StrictStr < ConstrainedStr < str.
if isinstance(e, FieldHasUnwantedTypeException):
return e.message
elif isinstance(e, MissingStrictInConstrainedTypeException):
frame_summary = traceback.extract_tb(e.__traceback__)[-2]
return (
f"Missing `strict=True` from {e.factory_name}() call \n"
+ traceback.format_list([frame_summary])[0].lstrip()
)
else:
raise ValueError(f"Unknown exception {e}") from e
def lint() -> int:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions.
Print any problems, then return a status code suitable for sys.exit."""
failures = do_lint()
if failures:
print(f"Found {len(failures)} problem(s)")
for failure in sorted(failures):
print(failure)
return os.EX_DATAERR if failures else os.EX_OK
def do_lint() -> set[str]:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
failures = set()
with monkeypatch_pydantic():
logger.debug("Importing synapse")
try:
# TODO: make "synapse" an argument so we can target this script at
# a subpackage
module = importlib.import_module("synapse")
except ModelCheckerException as e:
logger.warning("Bad annotation found when importing synapse")
failures.add(format_model_checker_exception(e))
return failures
try:
logger.debug("Fetching subpackages")
module_infos = list(
pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
)
except ModelCheckerException as e:
logger.warning("Bad annotation found when looking for modules to import")
failures.add(format_model_checker_exception(e))
return failures
for module_info in module_infos:
logger.debug("Importing %s", module_info.name)
try:
importlib.import_module(module_info.name)
except ModelCheckerException as e:
logger.warning(
"Bad annotation found when importing %s", module_info.name
)
failures.add(format_model_checker_exception(e))
return failures
def run_test_snippet(source: str) -> None:
"""Exec a snippet of source code in an isolated environment."""
# To emulate `source` being called at the top level of the module,
# the globals and locals we provide apparently have to be the same mapping.
#
# > Remember that at the module level, globals and locals are the same dictionary.
# > If exec gets two separate objects as globals and locals, the code will be
# > executed as if it were embedded in a class definition.
globals_: dict[str, object]
locals_: dict[str, object]
globals_ = locals_ = {}
exec(textwrap.dedent(source), globals_, locals_)
class TestConstrainedTypesPatch(unittest.TestCase):
def test_expression_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr()
"""
)
def test_called_as_module_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
import pydantic
pydantic.constr()
"""
)
def test_wildcard_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
constr()
"""
)
def test_alternative_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1.types import constr
except ImportError:
from pydantic.types import constr
constr()
"""
)
def test_alternative_import_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import types as pydantic_types
except ImportError:
from pydantic import types as pydantic_types
pydantic_types.constr()
"""
)
def test_kwarg_but_no_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(min_length=10)
"""
)
def test_kwarg_strict_False_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=False)
"""
)
def test_kwarg_strict_True_doesnt_raise(self) -> None:
with monkeypatch_pydantic():
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=True)
"""
)
def test_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
x: constr()
"""
)
def test_field_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import BaseModel, conint
except ImportError:
from pydantic import BaseModel, conint
class C:
x: conint()
"""
)
class TestFieldTypeInspection(unittest.TestCase):
@parameterized.expand(
[
("str",),
("bytes"),
("int",),
("float",),
("bool"),
("Optional[str]",),
("Union[None, str]",),
("list[str]",),
("list[list[str]]",),
("dict[StrictStr, str]",),
("dict[str, StrictStr]",),
("TypedDict('D', x=int)",),
]
)
def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
f"""
from typing import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
@parameterized.expand(
[
("StrictStr",),
("StrictBytes"),
("StrictInt",),
("StrictFloat",),
("StrictBool"),
("constr(strict=True, min_length=10)",),
("Optional[StrictStr]",),
("Union[None, StrictStr]",),
("list[StrictStr]",),
("list[list[StrictStr]]",),
("dict[StrictStr, StrictStr]",),
("TypedDict('D', x=StrictInt)",),
]
)
def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
with monkeypatch_pydantic():
run_test_snippet(
f"""
from typing import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
def test_field_holding_str_raises_with_alternative_import(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1.main import BaseModel
except ImportError:
from pydantic.main import BaseModel
class C(BaseModel):
f: str
"""
)
parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")
if __name__ == "__main__":
args = parser.parse_args(sys.argv[1:])
logging.basicConfig(
format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
level=logging.DEBUG if args.verbose else logging.INFO,
)
# suppress logs we don't care about
logging.getLogger("xmlschema").setLevel(logging.WARNING)
if args.mode == "lint":
sys.exit(lint())
elif args.mode == "test":
unittest.main(argv=sys.argv[:1])
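
For context, here is a minimal, self-contained sketch of the coercion bug class the check_pydantic_models.py script above guards against, using plain pydantic under v2's default (lax) mode. The power-level model is illustrative, not Synapse code.

```python
from pydantic import BaseModel, StrictInt, ValidationError


class LaxPowerLevels(BaseModel):
    ban: int  # lax mode: the string "50" is silently coerced to 50


class StrictPowerLevels(BaseModel):
    ban: StrictInt  # strict type: the string "50" is rejected


print(LaxPowerLevels(ban="50").ban)  # 50 (coerced)
try:
    StrictPowerLevels(ban="50")
except ValidationError as e:
    print("rejected:", e.error_count(), "error(s)")
```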

View File

@@ -134,9 +134,6 @@ fi
# Ensure the formatting of Rust code.
cargo-fmt
# Ensure all Pydantic models use strict types.
./scripts-dev/check_pydantic_models.py lint
# Ensure type hints are correct.
mypy

View File

@@ -1,104 +0,0 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2023 Maxwell G <maxwell@gtmx.me>
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING
from packaging.version import Version
try:
from pydantic import __version__ as pydantic_version
except ImportError:
import importlib.metadata
pydantic_version = importlib.metadata.version("pydantic")
HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
AnyHttpUrl,
BaseModel,
Extra,
Field,
FilePath,
MissingError,
PydanticValueError,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
conbytes,
confloat,
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.v1.error_wrappers import ErrorWrapper
from pydantic.v1.typing import get_args
else:
from pydantic import (
AnyHttpUrl,
BaseModel,
Extra,
Field,
FilePath,
MissingError,
PydanticValueError,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
conbytes,
confloat,
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.error_wrappers import ErrorWrapper
from pydantic.typing import get_args
__all__ = (
"HAS_PYDANTIC_V2",
"AnyHttpUrl",
"BaseModel",
"constr",
"conbytes",
"conint",
"confloat",
"ErrorWrapper",
"Extra",
"Field",
"FilePath",
"get_args",
"MissingError",
"parse_obj_as",
"PydanticValueError",
"StrictBool",
"StrictInt",
"StrictStr",
"ValidationError",
"validator",
"root_validator",
)
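
The deleted compatibility shim above selected between `pydantic` and `pydantic.v1` so callers could keep using the v1 API regardless of which major version was installed. A minimal sketch of that mechanism, assuming pydantic v2 is installed (which bundles the legacy API under `pydantic.v1`); the models are illustrative only.

```python
# pydantic v2 ships the legacy API under the `pydantic.v1` namespace,
# which is what the removed shim imported when it detected v2.
from pydantic import BaseModel as V2BaseModel
from pydantic.v1 import BaseModel as V1BaseModel


class OldStyle(V1BaseModel):
    n: int


class NewStyle(V2BaseModel):
    n: int


print(OldStyle.parse_obj({"n": 1}).n)       # v1 entry point
print(NewStyle.model_validate({"n": 1}).n)  # v2 entry point
```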

View File

@@ -16,14 +16,16 @@ import logging
from typing import TYPE_CHECKING, Optional
from urllib.parse import urlencode
from synapse._pydantic_compat import (
from pydantic import (
AnyHttpUrl,
BaseModel,
Extra,
ConfigDict,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
)
from synapse.api.auth.base import BaseAuth
from synapse.api.errors import (
AuthError,
@@ -63,8 +65,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"
class ServerMetadata(BaseModel):
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")
issuer: StrictStr
account_management_uri: StrictStr
@@ -73,14 +74,12 @@ class ServerMetadata(BaseModel):
class IntrospectionResponse(BaseModel):
retrieved_at_ms: StrictInt
active: StrictBool
scope: Optional[StrictStr]
username: Optional[StrictStr]
sub: Optional[StrictStr]
device_id: Optional[StrictStr]
expires_in: Optional[StrictInt]
class Config:
extra = Extra.allow
scope: Optional[StrictStr] = None
username: Optional[StrictStr] = None
sub: Optional[StrictStr] = None
device_id: Optional[StrictStr] = None
expires_in: Optional[StrictInt] = None
model_config = ConfigDict(extra="allow")
def get_scope_set(self) -> set[str]:
if not self.scope:
@@ -148,11 +147,33 @@ class MasDelegatedAuth(BaseAuth):
@property
def _metadata_url(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/.well-known/openid-configuration"
return str(
AnyHttpUrl.build(
scheme=self._config.endpoint.scheme,
username=self._config.endpoint.username,
password=self._config.endpoint.password,
host=self._config.endpoint.host or "",
port=self._config.endpoint.port,
path=".well-known/openid-configuration",
query=None,
fragment=None,
)
)
@property
def _introspection_endpoint(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/oauth2/introspect"
return str(
AnyHttpUrl.build(
scheme=self._config.endpoint.scheme,
username=self._config.endpoint.username,
password=self._config.endpoint.password,
host=self._config.endpoint.host or "",
port=self._config.endpoint.port,
path="oauth2/introspect",
query=None,
fragment=None,
)
)
async def _load_metadata(self) -> ServerMetadata:
response = await self._http_client.get_json(self._metadata_url)

View File

@@ -21,8 +21,8 @@
from typing import Any, TypeVar
import jsonschema
from pydantic import BaseModel, TypeAdapter, ValidationError
from synapse._pydantic_compat import BaseModel, ValidationError, parse_obj_as
from synapse.config._base import ConfigError
from synapse.types import JsonDict, StrSequence
@@ -93,7 +93,7 @@ def parse_and_validate_mapping(
try:
# type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because
# `model_type` is a runtime variable. Pydantic is fine with this.
instances = parse_obj_as(dict[str, model_type], config) # type: ignore[valid-type]
instances = TypeAdapter(dict[str, model_type]).validate_python(config) # type: ignore[valid-type]
except ValidationError as e:
raise ConfigError(str(e)) from e
return instances
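
`parse_obj_as` is gone in pydantic v2; `TypeAdapter` is its replacement for validating arbitrary (non-BaseModel) types such as `dict[str, model_type]`, as used above. A sketch of the pattern with a hypothetical model:

```python
from pydantic import BaseModel, StrictStr, TypeAdapter, ValidationError


class Listener(BaseModel):
    bind_address: StrictStr


adapter = TypeAdapter(dict[str, Listener])
parsed = adapter.validate_python({"client": {"bind_address": "127.0.0.1"}})
print(parsed["client"].bind_address)

try:
    adapter.validate_python({"client": {"bind_address": 8080}})  # StrictStr rejects ints
except ValidationError as e:
    print(e.error_count())  # 1
```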

View File

@@ -15,15 +15,17 @@
from typing import Any, Optional
from synapse._pydantic_compat import (
from pydantic import (
AnyHttpUrl,
Field,
FilePath,
StrictBool,
StrictStr,
ValidationError,
validator,
model_validator,
)
from typing_extensions import Self
from synapse.config.experimental import read_secret_from_file_once
from synapse.types import JsonDict
from synapse.util.pydantic_models import ParseModel
@@ -33,27 +35,24 @@ from ._base import Config, ConfigError, RootConfig
class MasConfigModel(ParseModel):
enabled: StrictBool = False
endpoint: AnyHttpUrl = Field(default="http://localhost:8080")
endpoint: AnyHttpUrl = AnyHttpUrl("http://localhost:8080")
secret: Optional[StrictStr] = Field(default=None)
secret_path: Optional[FilePath] = Field(default=None)
@validator("secret")
def validate_secret_is_set_if_enabled(cls, v: Any, values: dict) -> Any:
if values.get("enabled", False) and not values.get("secret_path") and not v:
@model_validator(mode="after")
def verify_secret(self) -> Self:
if not self.enabled:
return self
if not self.secret and not self.secret_path:
raise ValueError(
"You must set a `secret` or `secret_path` when enabling Matrix Authentication Service integration."
"You must set a `secret` or `secret_path` when enabling the Matrix "
"Authentication Service integration."
)
return v
@validator("secret_path")
def validate_secret_path_is_set_if_enabled(cls, v: Any, values: dict) -> Any:
if values.get("secret"):
if self.secret and self.secret_path:
raise ValueError(
"`secret` and `secret_path` cannot be set at the same time."
)
return v
return self
class MasConfig(Config):
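
The change above replaces two v1 `@validator`s that read sibling fields from the `values` dict with a single v2 `@model_validator(mode="after")` that runs on the fully parsed model and returns `self`. A sketch of that pattern with an illustrative model, not the real MasConfigModel:

```python
from typing import Optional

from pydantic import BaseModel, StrictBool, StrictStr, model_validator
from typing_extensions import Self


class ServiceConfig(BaseModel):
    enabled: StrictBool = False
    secret: Optional[StrictStr] = None
    secret_path: Optional[StrictStr] = None

    @model_validator(mode="after")
    def verify_secret(self) -> Self:
        # Runs after field parsing, so sibling fields are plain attributes here.
        if self.enabled and not (self.secret or self.secret_path):
            raise ValueError("set `secret` or `secret_path` when enabled")
        if self.secret and self.secret_path:
            raise ValueError("`secret` and `secret_path` are mutually exclusive")
        return self


print(ServiceConfig(enabled=False).enabled)
```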

View File

@@ -17,9 +17,9 @@
from typing import Any, Optional
from pydantic import ValidationError
from pydantic import Field, StrictStr, ValidationError, model_validator
from typing_extensions import Self
from synapse._pydantic_compat import Field, StrictStr, validator
from synapse.types import JsonDict
from synapse.util.pydantic_models import ParseModel
@@ -32,14 +32,13 @@ class TransportConfigModel(ParseModel):
livekit_service_url: Optional[StrictStr] = Field(default=None)
"""An optional livekit service URL. Only required if type is "livekit"."""
@validator("livekit_service_url", always=True)
def validate_livekit_service_url(cls, v: Any, values: dict) -> Any:
if values.get("type") == "livekit" and not v:
@model_validator(mode="after")
def validate_livekit_service_url(self) -> Self:
if self.type == "livekit" and not self.livekit_service_url:
raise ValueError(
"You must set a `livekit_service_url` when using the 'livekit' transport."
)
return v
return self
class MatrixRtcConfigModel(ParseModel):

View File

@@ -25,12 +25,12 @@ import logging
from typing import Any, Optional, Union
import attr
from synapse._pydantic_compat import (
from pydantic import (
StrictBool,
StrictInt,
StrictStr,
)
from synapse.config._base import (
Config,
ConfigError,

View File

@@ -22,8 +22,8 @@ import collections.abc
from typing import Union, cast
import jsonschema
from pydantic import Field, StrictBool, StrictStr
from synapse._pydantic_compat import Field, StrictBool, StrictStr
from synapse.api.constants import (
MAX_ALIAS_LENGTH,
EventContentFields,

View File

@@ -35,15 +35,10 @@ from typing import (
overload,
)
from pydantic import BaseModel, ValidationError
from twisted.web.server import Request
from synapse._pydantic_compat import (
BaseModel,
ErrorWrapper,
MissingError,
PydanticValueError,
ValidationError,
)
from synapse.api.errors import Codes, SynapseError
from synapse.http import redact_uri
from synapse.http.server import HttpServer
@@ -897,20 +892,20 @@ def validate_json_object(content: JsonDict, model_type: type[Model]) -> Model:
if it wasn't a JSON object.
"""
try:
instance = model_type.parse_obj(content)
instance = model_type.model_validate(content)
except ValidationError as e:
err_type = e.errors()[0]["type"]
# Choose a matrix error code. The catch-all is BAD_JSON, but we try to find a
# more specific error if possible (which occasionally helps us to be spec-
# compliant). This is a bit awkward because the spec's error codes aren't very
# clear-cut: BAD_JSON arguably overlaps with MISSING_PARAM and INVALID_PARAM.
errcode = Codes.BAD_JSON
raw_errors = e.raw_errors
if len(raw_errors) == 1 and isinstance(raw_errors[0], ErrorWrapper):
raw_error = raw_errors[0].exc
if isinstance(raw_error, MissingError):
if e.error_count() == 1:
if err_type == "missing":
errcode = Codes.MISSING_PARAM
elif isinstance(raw_error, PydanticValueError):
elif err_type == "value_error":
errcode = Codes.INVALID_PARAM
raise SynapseError(HTTPStatus.BAD_REQUEST, str(e), errcode=errcode)
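
The new error-mapping code above switches on the v2 error type strings instead of v1's `raw_errors` wrappers. A sketch of how a v2 `ValidationError` surfaces the "missing" and "value_error" types it checks for; the model and validator are hypothetical:

```python
from pydantic import BaseModel, StrictStr, ValidationError, field_validator


class Body(BaseModel):
    user_id: StrictStr

    @field_validator("user_id")
    @classmethod
    def must_be_mxid(cls, v: str) -> str:
        if not v.startswith("@"):
            raise ValueError("not a Matrix user ID")
        return v


for payload in ({}, {"user_id": "alice"}):
    try:
        Body.model_validate(payload)
    except ValidationError as e:
        print(e.error_count(), e.errors()[0]["type"])
# 1 missing
# 1 value_error
```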

View File

@@ -26,8 +26,8 @@ from http import HTTPStatus
from typing import TYPE_CHECKING, Optional, Union
import attr
from pydantic import StrictBool, StrictInt, StrictStr
from synapse._pydantic_compat import StrictBool, StrictInt, StrictStr
from synapse.api.constants import Direction
from synapse.api.errors import Codes, NotFoundError, SynapseError
from synapse.http.servlet import (
@@ -1476,9 +1476,9 @@ class RedactUser(RestServlet):
class PostBody(RequestBodyModel):
rooms: list[StrictStr]
reason: Optional[StrictStr]
limit: Optional[StrictInt]
use_admin: Optional[StrictBool]
reason: Optional[StrictStr] = None
limit: Optional[StrictInt] = None
use_admin: Optional[StrictBool] = None
async def on_POST(
self, request: SynapseRequest, user_id: str

View File

@@ -25,10 +25,11 @@ from typing import TYPE_CHECKING, Literal, Optional
from urllib.parse import urlparse
import attr
from pydantic import StrictBool, StrictStr, StringConstraints
from typing_extensions import Annotated
from twisted.web.server import Request
from synapse._pydantic_compat import StrictBool, StrictStr, constr
from synapse.api.constants import LoginType
from synapse.api.errors import (
Codes,
@@ -162,11 +163,9 @@ class PasswordRestServlet(RestServlet):
class PostBody(RequestBodyModel):
auth: Optional[AuthenticationData] = None
logout_devices: StrictBool = True
if TYPE_CHECKING:
# workaround for https://github.com/samuelcolvin/pydantic/issues/156
new_password: Optional[StrictStr] = None
else:
new_password: Optional[constr(max_length=512, strict=True)] = None
new_password: Optional[
Annotated[str, StringConstraints(max_length=512, strict=True)]
] = None
@interactive_auth_handler
async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]:

View File

@@ -24,7 +24,8 @@ import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional
from synapse._pydantic_compat import Extra, StrictStr
from pydantic import ConfigDict, StrictStr
from synapse.api import errors
from synapse.api.errors import NotFoundError, SynapseError, UnrecognizedRequestError
from synapse.http.server import HttpServer
@@ -94,7 +95,7 @@ class DeleteDevicesRestServlet(RestServlet):
self.auth_handler = hs.get_auth_handler()
class PostBody(RequestBodyModel):
auth: Optional[AuthenticationData]
auth: Optional[AuthenticationData] = None
devices: list[StrictStr]
@interactive_auth_handler
@@ -108,7 +109,7 @@ class DeleteDevicesRestServlet(RestServlet):
# TODO: Can/should we remove this fallback now?
# deal with older clients which didn't pass a JSON dict
# the same as those that pass an empty dict
body = self.PostBody.parse_obj({})
body = self.PostBody.model_validate({})
else:
raise e
@@ -172,7 +173,7 @@ class DeviceRestServlet(RestServlet):
return 200, device
class DeleteBody(RequestBodyModel):
auth: Optional[AuthenticationData]
auth: Optional[AuthenticationData] = None
@interactive_auth_handler
async def on_DELETE(
@@ -188,7 +189,7 @@ class DeviceRestServlet(RestServlet):
# TODO: can/should we remove this fallback now?
# deal with older clients which didn't pass a JSON dict
# the same as those that pass an empty dict
body = self.DeleteBody.parse_obj({})
body = self.DeleteBody.model_validate({})
else:
raise
@@ -217,7 +218,7 @@ class DeviceRestServlet(RestServlet):
return 200, {}
class PutBody(RequestBodyModel):
display_name: Optional[StrictStr]
display_name: Optional[StrictStr] = None
async def on_PUT(
self, request: SynapseRequest, device_id: str
@@ -247,8 +248,7 @@ class DehydratedDeviceDataModel(RequestBodyModel):
Expects other freeform fields. Use .dict() to access them.
"""
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")
algorithm: StrictStr
@@ -316,7 +316,7 @@ class DehydratedDeviceServlet(RestServlet):
class PutBody(RequestBodyModel):
device_data: DehydratedDeviceDataModel
initial_device_display_name: Optional[StrictStr]
initial_device_display_name: Optional[StrictStr] = None
async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]:
submission = parse_and_validate_json_object_from_request(request, self.PutBody)
@@ -391,7 +391,7 @@ class DehydratedDeviceEventsServlet(RestServlet):
self.store = hs.get_datastores().main
class PostBody(RequestBodyModel):
next_batch: Optional[StrictStr]
next_batch: Optional[StrictStr] = None
async def on_POST(
self, request: SynapseRequest, device_id: str
@@ -539,9 +539,7 @@ class DehydratedDeviceV2Servlet(RestServlet):
device_data: DehydratedDeviceDataModel
device_id: StrictStr
initial_device_display_name: Optional[StrictStr]
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")
async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]:
submission = parse_and_validate_json_object_from_request(request, self.PutBody)
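
Two recurring changes in this file: `parse_obj` becomes `model_validate`, and `Optional[...]` fields gain an explicit `= None`, because v2 no longer treats `Optional` as implying a default. A sketch with hypothetical body models:

```python
from typing import Optional

from pydantic import BaseModel, StrictStr, ValidationError


class BodyWithDefault(BaseModel):
    display_name: Optional[StrictStr] = None


class BodyWithoutDefault(BaseModel):
    # In v2 this field is required: it may be None, but the key must be present.
    display_name: Optional[StrictStr]


print(BodyWithDefault.model_validate({}).display_name)  # None

try:
    BodyWithoutDefault.model_validate({})
except ValidationError as e:
    print(e.errors()[0]["type"])  # "missing"
```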

View File

@@ -22,9 +22,10 @@
import logging
from typing import TYPE_CHECKING, Literal, Optional
from pydantic import StrictStr
from twisted.web.server import Request
from synapse._pydantic_compat import StrictStr
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import (

View File

@@ -26,13 +26,8 @@ from collections import Counter
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Mapping, Optional, Union
from typing_extensions import Self
from pydantic import StrictBool, StrictStr, field_validator
from synapse._pydantic_compat import (
StrictBool,
StrictStr,
validator,
)
from synapse.api.auth.mas import MasDelegatedAuth
from synapse.api.errors import (
Codes,
@@ -164,7 +159,7 @@ class KeyUploadServlet(RestServlet):
device_keys: Optional[DeviceKeys] = None
"""Identity keys for the device. May be absent if no new identity keys are required."""
fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]]
fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] = None
"""
The public key which should be used if the device's one-time keys are
exhausted. The fallback key is not deleted once used, but should be
@@ -180,8 +175,9 @@ class KeyUploadServlet(RestServlet):
May be absent if a new fallback key is not required.
"""
@validator("fallback_keys", pre=True)
def validate_fallback_keys(cls: Self, v: Any) -> Any:
@field_validator("fallback_keys", mode="before")
@classmethod
def validate_fallback_keys(cls, v: Any) -> Any:
if v is None:
return v
if not isinstance(v, dict):
@@ -206,8 +202,9 @@ class KeyUploadServlet(RestServlet):
https://spec.matrix.org/v1.16/client-server-api/#key-algorithms.
"""
@validator("one_time_keys", pre=True)
def validate_one_time_keys(cls: Self, v: Any) -> Any:
@field_validator("one_time_keys", mode="before")
@classmethod
def validate_one_time_keys(cls, v: Any) -> Any:
if v is None:
return v
if not isinstance(v, dict):
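
As above, v1's `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")` plus an explicit `@classmethod`. A sketch of a before-validator that checks the raw input shape ahead of the field's own type validation; the model is illustrative:

```python
from typing import Any, Mapping, Optional

from pydantic import BaseModel, StrictStr, field_validator


class Upload(BaseModel):
    fallback_keys: Optional[Mapping[StrictStr, StrictStr]] = None

    @field_validator("fallback_keys", mode="before")
    @classmethod
    def check_shape(cls, v: Any) -> Any:
        # Runs before coercion/validation of the annotated type, like v1's pre=True.
        if v is not None and not isinstance(v, dict):
            raise ValueError("fallback_keys must be a mapping")
        return v


print(Upload(fallback_keys={"signed_curve25519:AAAA": "key"}).fallback_keys)
```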

View File

@@ -23,7 +23,8 @@ import logging
from http import HTTPStatus
from typing import TYPE_CHECKING
from synapse._pydantic_compat import StrictStr
from pydantic import StrictStr
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import (

View File

@@ -50,7 +50,7 @@ class ThreadSubscriptionsRestServlet(RestServlet):
self.handler = hs.get_thread_subscriptions_handler()
class PutBody(RequestBodyModel):
automatic: Optional[AnyEventId]
automatic: Optional[AnyEventId] = None
"""
If supplied, the event ID of an event giving rise to this automatic subscription.

View File

@@ -23,11 +23,11 @@ import logging
import re
from typing import TYPE_CHECKING, Mapping, Optional
from pydantic import ConfigDict, StrictInt, StrictStr
from signedjson.sign import sign_json
from twisted.web.server import Request
from synapse._pydantic_compat import Extra, StrictInt, StrictStr
from synapse.crypto.keyring import ServerKeyFetcher
from synapse.http.server import HttpServer
from synapse.http.servlet import (
@@ -48,8 +48,7 @@ logger = logging.getLogger(__name__)
class _KeyQueryCriteriaDataModel(RequestBodyModel):
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")
minimum_valid_until_ts: Optional[StrictInt]

View File

@@ -17,7 +17,8 @@ import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional
from synapse._pydantic_compat import StrictStr
from pydantic import StrictStr
from synapse.api.errors import NotFoundError
from synapse.http.servlet import parse_and_validate_json_object_from_request
from synapse.types import JsonDict, UserID
@@ -52,7 +53,7 @@ class MasUpsertDeviceResource(MasBaseResource):
class PostBody(RequestBodyModel):
localpart: StrictStr
device_id: StrictStr
display_name: Optional[StrictStr]
display_name: Optional[StrictStr] = None
async def _async_render_POST(
self, request: "SynapseRequest"
@@ -176,7 +177,7 @@ class MasSyncDevicesResource(MasBaseResource):
class PostBody(RequestBodyModel):
localpart: StrictStr
devices: set[StrictStr]
devices: list[str]
async def _async_render_POST(
self, request: "SynapseRequest"

View File

@@ -17,7 +17,8 @@ import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Optional, TypedDict
from synapse._pydantic_compat import StrictBool, StrictStr, root_validator
from pydantic import StrictBool, StrictStr, model_validator
from synapse.api.errors import NotFoundError, SynapseError
from synapse.http.servlet import (
parse_and_validate_json_object_from_request,
@@ -111,7 +112,8 @@ class MasProvisionUserResource(MasBaseResource):
unset_emails: StrictBool = False
set_emails: Optional[list[StrictStr]] = None
@root_validator(pre=True)
@model_validator(mode="before")
@classmethod
def validate_exclusive(cls, values: Any) -> Any:
if "unset_displayname" in values and "set_displayname" in values:
raise ValueError(

View File

@@ -35,8 +35,8 @@ from typing import (
)
import attr
from pydantic import BaseModel
from synapse._pydantic_compat import BaseModel
from synapse.storage.engines import PostgresEngine
from synapse.storage.types import Connection, Cursor
from synapse.types import JsonDict, StrCollection
@@ -954,7 +954,7 @@ class BackgroundUpdater:
# match the constraint.
# 3. We try re-validating the constraint.
parsed_progress = ValidateConstraintProgress.parse_obj(progress)
parsed_progress = ValidateConstraintProgress.model_validate(progress)
if parsed_progress.state == ValidateConstraintProgress.State.check:
return_columns = ", ".join(unique_columns)

View File

@@ -32,8 +32,8 @@ from typing import (
)
import attr
from pydantic import ConfigDict
from synapse._pydantic_compat import Extra
from synapse.api.constants import EventTypes
from synapse.events import EventBase
from synapse.types import (
@@ -65,15 +65,12 @@ class SlidingSyncConfig(SlidingSyncBody):
user: UserID
requester: Requester
# Pydantic config
class Config:
# By default, ignore fields that we don't recognise.
extra = Extra.ignore
# By default, don't allow fields to be reassigned after parsing.
allow_mutation = False
# Allow custom types like `UserID` to be used in the model
arbitrary_types_allowed = True
model_config = ConfigDict(
extra="ignore",
frozen=True,
# Allow custom types like `UserID` to be used in the model.
arbitrary_types_allowed=True,
)
class OperationType(Enum):

View File

@@ -18,18 +18,21 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Union
from typing import Optional, Union
from synapse._pydantic_compat import (
Extra,
from pydantic import (
ConfigDict,
Field,
StrictBool,
StrictInt,
StrictStr,
conint,
constr,
validator,
StringConstraints,
field_validator,
model_validator,
)
from pydantic_core import PydanticCustomError
from typing_extensions import Annotated, Self
from synapse.types.rest import RequestBodyModel
from synapse.util.threepids import validate_email
@@ -44,39 +47,36 @@ class AuthenticationData(RequestBodyModel):
`.dict(exclude_unset=True)` to access them.
"""
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")
session: Optional[StrictStr] = None
type: Optional[StrictStr] = None
if TYPE_CHECKING:
ClientSecretStr = StrictStr
else:
# See also assert_valid_client_secret()
ClientSecretStr = constr(
regex="[0-9a-zA-Z.=_-]", # noqa: F722
# See also assert_valid_client_secret()
ClientSecretStr = Annotated[
str,
StringConstraints(
pattern="[0-9a-zA-Z.=_-]",
min_length=1,
max_length=255,
strict=True,
)
),
]
class ThreepidRequestTokenBody(RequestBodyModel):
client_secret: ClientSecretStr
id_server: Optional[StrictStr]
id_access_token: Optional[StrictStr]
next_link: Optional[StrictStr]
id_server: Optional[StrictStr] = None
id_access_token: Optional[StrictStr] = None
next_link: Optional[StrictStr] = None
send_attempt: StrictInt
@validator("id_access_token", always=True)
def token_required_for_identity_server(
cls, token: Optional[str], values: dict[str, object]
) -> Optional[str]:
if values.get("id_server") is not None and token is None:
@model_validator(mode="after")
def token_required_for_identity_server(self) -> Self:
if self.id_server is not None and self.id_access_token is None:
raise ValueError("id_access_token is required if an id_server is supplied.")
return token
return self
class EmailRequestTokenBody(ThreepidRequestTokenBody):
@@ -87,14 +87,21 @@ class EmailRequestTokenBody(ThreepidRequestTokenBody):
# know the exact spelling (eg. upper and lower case) of address in the database.
# Without this, an email stored in the database as "foo@bar.com" would cause
# user requests for "FOO@bar.com" to raise a Not Found error.
_email_validator = validator("email", allow_reuse=True)(validate_email)
@field_validator("email")
@classmethod
def _email_validator(cls, email: StrictStr) -> StrictStr:
try:
return validate_email(email)
except ValueError as e:
# To ensure backward compatibility of HTTP error codes, we return a
# Pydantic error with the custom, unrecognized error type
# "email_custom_err_type" instead of the default error type
# "value_error". This results in the more generic BAD_JSON HTTP
# error instead of the more specific INVALID_PARAM one.
raise PydanticCustomError("email_custom_err_type", str(e), None) from e
if TYPE_CHECKING:
ISO3116_1_Alpha_2 = StrictStr
else:
# Per spec: two-letter uppercase ISO-3166-1-alpha-2
ISO3116_1_Alpha_2 = constr(regex="[A-Z]{2}", strict=True)
ISO3116_1_Alpha_2 = Annotated[str, StringConstraints(pattern="[A-Z]{2}", strict=True)]
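
The `constr(...)` aliases that previously had to hide behind `if TYPE_CHECKING` for mypy's benefit become `Annotated[str, StringConstraints(...)]`, which type checkers understand directly. A sketch of the pattern with an illustrative type alias and field:

```python
from typing import Optional

from pydantic import BaseModel, StringConstraints, ValidationError
from typing_extensions import Annotated

ShortToken = Annotated[str, StringConstraints(max_length=8, strict=True)]


class Body(BaseModel):
    token: Optional[ShortToken] = None


print(Body(token="abc").token)
try:
    Body(token="x" * 9)  # exceeds max_length
except ValidationError as e:
    print(e.error_count())  # 1
```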
class MsisdnRequestTokenBody(ThreepidRequestTokenBody):
@@ -144,12 +151,10 @@ class SlidingSyncBody(RequestBodyModel):
(Max 1000 messages)
"""
required_state: list[tuple[StrictStr, StrictStr]]
# mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884
if TYPE_CHECKING:
timeline_limit: int
else:
timeline_limit: conint(le=1000, strict=True) # type: ignore[valid-type]
required_state: list[
Annotated[tuple[StrictStr, StrictStr], Field(strict=False)]
]
timeline_limit: Annotated[int, Field(le=1000, strict=True)]
class SlidingSyncList(CommonRoomParameters):
"""
@@ -251,13 +256,17 @@ class SlidingSyncBody(RequestBodyModel):
tags: Optional[list[StrictStr]] = None
not_tags: Optional[list[StrictStr]] = None
# mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884
if TYPE_CHECKING:
ranges: Optional[list[tuple[int, int]]] = None
else:
ranges: Optional[
list[tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]]
] = None # type: ignore[valid-type]
ranges: Optional[
list[
Annotated[
tuple[
Annotated[int, Field(ge=0, strict=True)],
Annotated[int, Field(ge=0, strict=True)],
],
Field(strict=False),
]
]
] = None
slow_get_all_rooms: Optional[StrictBool] = False
filters: Optional[Filters] = None
@@ -286,7 +295,8 @@ class SlidingSyncBody(RequestBodyModel):
limit: StrictInt = 100
since: Optional[StrictStr] = None
@validator("since")
@field_validator("since")
@classmethod
def since_token_check(
cls, value: Optional[StrictStr]
) -> Optional[StrictStr]:
@@ -382,22 +392,21 @@ class SlidingSyncBody(RequestBodyModel):
receipts: Optional[ReceiptsExtension] = None
typing: Optional[TypingExtension] = None
thread_subscriptions: Optional[ThreadSubscriptionsExtension] = Field(
alias="io.element.msc4308.thread_subscriptions"
None, alias="io.element.msc4308.thread_subscriptions"
)
conn_id: Optional[StrictStr]
# mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884
if TYPE_CHECKING:
lists: Optional[dict[str, SlidingSyncList]] = None
else:
lists: Optional[dict[constr(max_length=64, strict=True), SlidingSyncList]] = (
None # type: ignore[valid-type]
)
conn_id: Optional[StrictStr] = None
lists: Optional[
dict[
Annotated[str, StringConstraints(max_length=64, strict=True)],
SlidingSyncList,
]
] = None
room_subscriptions: Optional[dict[StrictStr, RoomSubscription]] = None
extensions: Optional[Extensions] = None
@validator("lists")
@field_validator("lists")
@classmethod
def lists_length_check(
cls, value: Optional[dict[str, SlidingSyncList]]
) -> Optional[dict[str, SlidingSyncList]]:
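
Two more patterns from this file: aliased optional fields now need an explicit default passed to `Field`, and `conint(...)`-style constraints become `Annotated[int, Field(...)]`. A minimal sketch; the model and field names are illustrative, not the real SlidingSyncBody:

```python
from typing import Optional

from pydantic import BaseModel, Field, StrictStr, ValidationError
from typing_extensions import Annotated


class SyncExtensions(BaseModel):
    thread_subscriptions: Optional[StrictStr] = Field(
        None, alias="io.element.msc4308.thread_subscriptions"
    )
    # conint(le=1000, strict=True) -> Annotated[int, Field(le=1000, strict=True)]
    timeline_limit: Annotated[int, Field(le=1000, strict=True)] = 100


print(SyncExtensions.model_validate({}).timeline_limit)  # 100
try:
    SyncExtensions.model_validate({"timeline_limit": 2000})  # violates le=1000
except ValidationError as e:
    print(e.error_count())  # 1
```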

View File

@@ -15,7 +15,8 @@
from typing import Any, Optional
from synapse._pydantic_compat import Field, StrictStr, ValidationError, validator
from pydantic import Field, StrictStr, ValidationError, field_validator
from synapse.types import JsonDict
from synapse.util.pydantic_models import ParseModel
from synapse.util.stringutils import random_string
@@ -40,7 +41,7 @@ class MTextRepresentation(ParseModel):
"""
body: StrictStr
mimetype: Optional[StrictStr]
mimetype: Optional[StrictStr] = None
class MTopic(ParseModel):
@@ -52,7 +53,7 @@ class MTopic(ParseModel):
See `TopicContentBlock` in the Matrix specification.
"""
m_text: Optional[list[MTextRepresentation]] = Field(alias="m.text")
m_text: Optional[list[MTextRepresentation]] = Field(None, alias="m.text")
"""
An ordered array of textual representations in different mimetypes.
"""
@@ -60,16 +61,17 @@ class MTopic(ParseModel):
# Because "Receivers SHOULD use the first representation in the array that they
# understand.", we ignore invalid representations in the `m.text` field and use
# what we can.
@validator("m_text", pre=True)
@field_validator("m_text", mode="before")
@classmethod
def ignore_invalid_representations(
cls, m_text: Any
) -> Optional[list[MTextRepresentation]]:
if not isinstance(m_text, list):
raise ValueError("m.text must be a list")
if not isinstance(m_text, (list, tuple)):
raise ValueError("m.text must be a list or a tuple")
representations = []
for element in m_text:
try:
representations.append(MTextRepresentation.parse_obj(element))
representations.append(MTextRepresentation.model_validate(element))
except ValidationError:
continue
return representations
@@ -85,17 +87,18 @@ class TopicContent(ParseModel):
The topic in plain text.
"""
m_topic: Optional[MTopic] = Field(alias="m.topic")
m_topic: Optional[MTopic] = Field(None, alias="m.topic")
"""
Textual representation of the room topic in different mimetypes.
"""
# We ignore invalid `m.topic` fields as we can always fall back to the plain-text
# `topic` field.
@validator("m_topic", pre=True)
@field_validator("m_topic", mode="before")
@classmethod
def ignore_invalid_m_topic(cls, m_topic: Any) -> Optional[MTopic]:
try:
return MTopic.parse_obj(m_topic)
return MTopic.model_validate(m_topic)
except ValidationError:
return None
@@ -114,7 +117,7 @@ def get_plain_text_topic_from_event_content(content: JsonDict) -> Optional[str]:
"""
try:
topic_content = TopicContent.parse_obj(content)
topic_content = TopicContent.model_validate(content, strict=False)
except ValidationError:
return None

View File

@@ -13,18 +13,20 @@
#
#
import re
from typing import Any, Callable, Generator
from typing import Annotated, Union
from synapse._pydantic_compat import BaseModel, Extra, StrictStr
from pydantic import AfterValidator, BaseModel, ConfigDict, StrictStr, StringConstraints
from synapse.api.errors import SynapseError
from synapse.types import EventID
class ParseModel(BaseModel):
"""A custom version of Pydantic's BaseModel which
- ignores unknown fields and
- does not allow fields to be overwritten after construction,
- ignores unknown fields,
- does not allow fields to be overwritten after construction and
- enables strict mode,
but otherwise uses Pydantic's default behaviour.
@@ -36,48 +38,19 @@ class ParseModel(BaseModel):
https://pydantic-docs.helpmanual.io/usage/model_config/#change-behaviour-globally
"""
class Config:
# By default, ignore fields that we don't recognise.
extra = Extra.ignore
# By default, don't allow fields to be reassigned after parsing.
allow_mutation = False
model_config = ConfigDict(extra="ignore", frozen=True, strict=True)
class AnyEventId(StrictStr):
"""
A validator for strings that need to be an Event ID.
def validate_event_id_v1_and_2(value: str) -> str:
try:
EventID.from_string(value)
except SynapseError as e:
raise ValueError from e
return value
Accepts any valid grammar of Event ID from any room version.
"""
EVENT_ID_HASH_ROOM_VERSION_3_PLUS = re.compile(
r"^([a-zA-Z0-9-_]{43}|[a-zA-Z0-9+/]{43})$"
)
@classmethod
def __get_validators__(cls) -> Generator[Callable[..., Any], Any, Any]:
yield from super().__get_validators__() # type: ignore
yield cls.validate_event_id
@classmethod
def validate_event_id(cls, value: str) -> str:
if not value.startswith("$"):
raise ValueError("Event ID must start with `$`")
if ":" in value:
# Room versions 1 and 2
EventID.from_string(value) # throws on fail
else:
# Room versions 3+: event ID is $ + a base64 sha256 hash
# Room version 3 is base64, 4+ are base64Url
# In both cases, the base64 is unpadded.
# refs:
# - https://spec.matrix.org/v1.15/rooms/v3/ e.g. $acR1l0raoZnm60CBwAVgqbZqoO/mYU81xysh1u7XcJk
# - https://spec.matrix.org/v1.15/rooms/v4/ e.g. $Rqnc-F-dvnEYJTyHq_iKxU2bZ1CI92-kuZq3a5lr5Zg
b64_hash = value[1:]
if cls.EVENT_ID_HASH_ROOM_VERSION_3_PLUS.fullmatch(b64_hash) is None:
raise ValueError(
"Event ID must either have a domain part or be a valid hash"
)
return value
EventIdV1And2 = Annotated[StrictStr, AfterValidator(validate_event_id_v1_and_2)]
EventIdV3Plus = Annotated[
StrictStr, StringConstraints(pattern=r"^\$([a-zA-Z0-9-_]{43}|[a-zA-Z0-9+/]{43})$")
]
AnyEventId = Union[EventIdV1And2, EventIdV3Plus]
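
The custom `AnyEventId` string class, which relied on v1's `__get_validators__` hook, is rebuilt above from standard v2 building blocks: `AfterValidator` for the v1/v2 grammar, `StringConstraints` for the hash-only v3+ grammar, and a plain `Union` of the two. A sketch of how such an annotated type validates; the checker function here is a simplified, hypothetical stand-in for `EventID.from_string()`:

```python
from pydantic import AfterValidator, StringConstraints, TypeAdapter, ValidationError
from typing_extensions import Annotated


def check_has_domain(value: str) -> str:
    # Simplified stand-in for EventID.from_string(); not Synapse's real check.
    if not value.startswith("$") or ":" not in value:
        raise ValueError("expected $localpart:domain")
    return value


EventIdV1And2 = Annotated[str, AfterValidator(check_has_domain)]
EventIdV3Plus = Annotated[
    str, StringConstraints(pattern=r"^\$([a-zA-Z0-9-_]{43}|[a-zA-Z0-9+/]{43})$")
]

adapter = TypeAdapter(EventIdV1And2)
print(adapter.validate_python("$abc:example.com"))
try:
    adapter.validate_python("not-an-event-id")
except ValidationError as e:
    print(e.error_count())  # 1
```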

View File

@@ -21,6 +21,7 @@
import os
import tempfile
from pathlib import Path
from unittest.mock import Mock
from synapse.config import ConfigError
@@ -309,7 +310,9 @@ class MasAuthDelegation(TestCase):
def test_secret_and_secret_path_are_mutually_exclusive(self) -> None:
with tempfile.NamedTemporaryFile() as f:
self.config_dict["matrix_authentication_service"]["secret"] = "verysecret"
self.config_dict["matrix_authentication_service"]["secret_path"] = f.name
self.config_dict["matrix_authentication_service"]["secret_path"] = Path(
f.name
)
with self.assertRaises(ConfigError):
self.parse_config()
@@ -317,13 +320,15 @@ class MasAuthDelegation(TestCase):
with tempfile.NamedTemporaryFile(buffering=0) as f:
f.write(b"53C237")
del self.config_dict["matrix_authentication_service"]["secret"]
self.config_dict["matrix_authentication_service"]["secret_path"] = f.name
self.config_dict["matrix_authentication_service"]["secret_path"] = Path(
f.name
)
config = self.parse_config()
self.assertEqual(config.mas.secret(), "53C237")
def test_secret_path_must_exist(self) -> None:
del self.config_dict["matrix_authentication_service"]["secret"]
self.config_dict["matrix_authentication_service"]["secret_path"] = (
self.config_dict["matrix_authentication_service"]["secret_path"] = Path(
"/not/a/valid/file"
)
with self.assertRaises(ConfigError):

View File

@@ -1201,7 +1201,9 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(
HTTPStatus.BAD_REQUEST, channel.code, msg=channel.result["body"]
)
self.assertEqual(expected_errcode, channel.json_body["errcode"])
self.assertEqual(
expected_errcode, channel.json_body["errcode"], msg=channel.result["body"]
)
self.assertIn(expected_error, channel.json_body["error"])
def _validate_token(self, link: str) -> None:

View File

@@ -21,7 +21,8 @@
import unittest as stdlib_unittest
from typing import Literal
from synapse._pydantic_compat import BaseModel, ValidationError
from pydantic import BaseModel, ValidationError
from synapse.types.rest.client import EmailRequestTokenBody
@@ -35,16 +36,16 @@ class ThreepidMediumEnumTestCase(stdlib_unittest.TestCase):
This is arguably more of a test of a class that inherits from str and Enum
simultaneously.
"""
model = self.Model.parse_obj({"medium": "email"})
model = self.Model.model_validate({"medium": "email"})
self.assertEqual(model.medium, "email")
def test_rejects_invalid_medium_value(self) -> None:
with self.assertRaises(ValidationError):
self.Model.parse_obj({"medium": "interpretive_dance"})
self.Model.model_validate({"medium": "interpretive_dance"})
def test_rejects_invalid_medium_type(self) -> None:
with self.assertRaises(ValidationError):
self.Model.parse_obj({"medium": 123})
self.Model.model_validate({"medium": 123})
class EmailRequestTokenBodyTestCase(stdlib_unittest.TestCase):
@@ -56,14 +57,14 @@ class EmailRequestTokenBodyTestCase(stdlib_unittest.TestCase):
def test_token_required_if_id_server_provided(self) -> None:
with self.assertRaises(ValidationError):
EmailRequestTokenBody.parse_obj(
EmailRequestTokenBody.model_validate(
{
**self.base_request,
"id_server": "identity.wonderland.com",
}
)
with self.assertRaises(ValidationError):
EmailRequestTokenBody.parse_obj(
EmailRequestTokenBody.model_validate(
{
**self.base_request,
"id_server": "identity.wonderland.com",
@@ -73,7 +74,7 @@ class EmailRequestTokenBodyTestCase(stdlib_unittest.TestCase):
def test_token_typechecked_when_id_server_provided(self) -> None:
with self.assertRaises(ValidationError):
EmailRequestTokenBody.parse_obj(
EmailRequestTokenBody.model_validate(
{
**self.base_request,
"id_server": "identity.wonderland.com",

View File

@@ -111,7 +111,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
# Assert the subscription was saved
channel = self.make_request(
@@ -119,8 +119,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.json_body, {"automatic": False})
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body)
# Now also register an automatic subscription; it should not
# override the manual subscription
@@ -130,7 +130,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{"automatic": self.threaded_events[0]},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
# Assert the manual subscription was not overridden
channel = self.make_request(
@@ -138,8 +138,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.json_body, {"automatic": False})
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body)
def test_subscribe_automatic_then_manual(self) -> None:
"""Test subscribing to a thread, first an automatic subscription then a manual subscription.
@@ -160,8 +160,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.json_body, {"automatic": True})
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
self.assertEqual(channel.json_body, {"automatic": True}, channel.json_body)
# Now also register a manual subscription
channel = self.make_request(
@@ -170,7 +170,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
# Assert the manual subscription was not overridden
channel = self.make_request(
@@ -178,8 +178,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.json_body, {"automatic": False})
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body)
def test_unsubscribe(self) -> None:
"""Test subscribing to a thread, then unsubscribing."""
@@ -191,7 +191,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
# Assert the subscription was saved
channel = self.make_request(
@@ -199,23 +199,23 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.json_body, {"automatic": True})
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
self.assertEqual(channel.json_body, {"automatic": True}, channel.json_body)
channel = self.make_request(
"DELETE",
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.OK)
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
channel = self.make_request(
"GET",
f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription",
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body)
def test_set_thread_subscription_nonexistent_thread(self) -> None:
"""Test setting subscription settings for a nonexistent thread."""
@@ -225,8 +225,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body)
def test_set_thread_subscription_no_access(self) -> None:
"""Test that a user can't set thread subscription for a thread they can't access."""
@@ -239,8 +239,8 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{},
access_token=no_access_token,
)
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")
self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body)
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body)
def test_invalid_body(self) -> None:
"""Test that sending invalid subscription settings is rejected."""
@@ -251,7 +251,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{"automatic": True},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST)
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
channel = self.make_request(
"PUT",
@@ -260,7 +260,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase):
{"automatic": "$malformedEventId"},
access_token=self.token,
)
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST)
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
def test_auto_subscribe_cause_event_not_in_thread(self) -> None:
"""