Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-11 01:40:27 +00:00)

Compare commits: v1.111.0 ... rei/synwor
14 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c747b4b37d | |
| | 7094bcf251 | |
| | 5af3ce8bf8 | |
| | 7fd218ad8b | |
| | 30627554d4 | |
| | 3c41609a9e | |
| | 18695c7631 | |
| | c76da0d948 | |
| | 3f203e1b10 | |
| | ae7f8cccc2 | |
| | 37112f76f7 | |
| | f61931fb8a | |
| | ae1712dd71 | |
| | c1229e0218 | |
36
.ci/scripts/setup_complement_prerequisites.sh
Executable file
@@ -0,0 +1,36 @@
+#!/bin/sh
+#
+# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
+#
+# Must be called after Synapse has been checked out to `synapse/`.
+#
+set -eu
+
+alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
+alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
+
+block Set Go Version
+  # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
+  # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
+
+  # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
+  echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
+  # Add the Go path to the PATH: We need this so we can call gotestfmt
+  echo "~/go/bin" >> $GITHUB_PATH
+endblock
+
+block Install Complement Dependencies
+  sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
+  go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
+endblock
+
+block Install custom gotestfmt template
+  mkdir .gotestfmt/github -p
+  cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
+endblock
+
+block Check out Complement
+  # Attempt to check out the same branch of Complement as the PR. If it
+  # doesn't exist, fallback to HEAD.
+  synapse/.ci/scripts/checkout_complement.sh
+endblock
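The `block`/`endblock` aliases exist to wrap each setup phase in a collapsible GitHub Actions log group while keeping `set -x` command tracing on inside the group. A minimal, self-contained sketch of the same pattern (assuming a POSIX shell that expands aliases in scripts, as dash, the default `/bin/sh` on Ubuntu runners, does):

```sh
#!/bin/sh
set -eu

# Same aliases as in the script above: briefly disable tracing, print the
# workflow-command marker that opens (or closes) a log group, then re-enable
# tracing so the grouped commands are echoed in the log.
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

block Example group    # prints "::group::Example group"
  echo "commands here run with tracing on and collapse under the group"
endblock               # prints "::endgroup::"
```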
352
.github/workflows/tests.yml
vendored
@@ -10,360 +10,37 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  check-sampleconfig:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: pip install .
-      - run: scripts-dev/generate_sample_config.sh --check
-      - run: scripts-dev/config-lint.sh
-
-  check-schema-delta:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
-      - run: scripts-dev/check_schema_delta.py --force-colors
-
-  lint:
-    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
-    with:
-      typechecking-extras: "all"
-
-  lint-crlf:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Check line endings
-        run: scripts-dev/check_line_terminators.sh
-
-  lint-newsfile:
-    if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-          fetch-depth: 0
-      - uses: actions/setup-python@v2
-      - run: "pip install 'towncrier>=18.6.0rc1'"
-      - run: scripts-dev/check-newsfragment.sh
-        env:
-          PULL_REQUEST_NUMBER: ${{ github.event.number }}
-
-  # Dummy step to gate other tests on without repeating the whole list
-  linting-done:
-    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
-    needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
-    runs-on: ubuntu-latest
-    steps:
-      - run: "true"
-
-  trial:
-    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
-    needs: linting-done
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
-        database: ["sqlite"]
-        extras: ["all"]
-        include:
-          # Newest Python without optional deps
-          - python-version: "3.10"
-            extras: ""
-
-          # Oldest Python with PostgreSQL
-          - python-version: "3.7"
-            database: "postgres"
-            postgres-version: "10"
-            extras: "all"
-
-          # Newest Python with newest PostgreSQL
-          - python-version: "3.10"
-            database: "postgres"
-            postgres-version: "14"
-            extras: "all"
-
-    steps:
-      - uses: actions/checkout@v2
-      - run: sudo apt-get -qq install xmlsec1
-      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
-        if: ${{ matrix.postgres-version }}
-        run: |
-          docker run -d -p 5432:5432 \
-            -e POSTGRES_PASSWORD=postgres \
-            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
-            postgres:${{ matrix.postgres-version }}
-      - uses: matrix-org/setup-python-poetry@v1
-        with:
-          python-version: ${{ matrix.python-version }}
-          extras: ${{ matrix.extras }}
-      - name: Await PostgreSQL
-        if: ${{ matrix.postgres-version }}
-        timeout-minutes: 2
-        run: until pg_isready -h localhost; do sleep 1; done
-      - run: poetry run trial --jobs=2 tests
-        env:
-          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
-          SYNAPSE_POSTGRES_HOST: localhost
-          SYNAPSE_POSTGRES_USER: postgres
-          SYNAPSE_POSTGRES_PASSWORD: postgres
-      - name: Dump logs
-        # Logs are most useful when the command fails, always include them.
-        if: ${{ always() }}
-        # Note: Dumps to workflow logs instead of using actions/upload-artifact
-        # This keeps logs colocated with failing jobs
-        # It also ignores find's exit code; this is a best effort affair
-        run: >-
-          find _trial_temp -name '*.log'
-          -exec echo "::group::{}" \;
-          -exec cat {} \;
-          -exec echo "::endgroup::" \;
-          || true
-
-  trial-olddeps:
-    # Note: sqlite only; no postgres
-    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
-    needs: linting-done
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Test with old deps
-        uses: docker://ubuntu:focal # For old python and sqlite
-        # Note: focal seems to be using 3.8, but the oldest is 3.7?
-        # See https://github.com/matrix-org/synapse/issues/12343
-        with:
-          workdir: /github/workspace
-          entrypoint: .ci/scripts/test_old_deps.sh
-      - name: Dump logs
-        # Logs are most useful when the command fails, always include them.
-        if: ${{ always() }}
-        # Note: Dumps to workflow logs instead of using actions/upload-artifact
-        # This keeps logs colocated with failing jobs
-        # It also ignores find's exit code; this is a best effort affair
-        run: >-
-          find _trial_temp -name '*.log'
-          -exec echo "::group::{}" \;
-          -exec cat {} \;
-          -exec echo "::endgroup::" \;
-          || true
-
-  trial-pypy:
-    # Very slow; only run if the branch name includes 'pypy'
-    # Note: sqlite only; no postgres. Completely untested since poetry move.
-    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
-    needs: linting-done
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["pypy-3.7"]
-        extras: ["all"]
-
-    steps:
-      - uses: actions/checkout@v2
-      # Install libs necessary for PyPy to build binary wheels for dependencies
-      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
-      - uses: matrix-org/setup-python-poetry@v1
-        with:
-          python-version: ${{ matrix.python-version }}
-          extras: ${{ matrix.extras }}
-      - run: poetry run trial --jobs=2 tests
-      - name: Dump logs
-        # Logs are most useful when the command fails, always include them.
-        if: ${{ always() }}
-        # Note: Dumps to workflow logs instead of using actions/upload-artifact
-        # This keeps logs colocated with failing jobs
-        # It also ignores find's exit code; this is a best effort affair
-        run: >-
-          find _trial_temp -name '*.log'
-          -exec echo "::group::{}" \;
-          -exec cat {} \;
-          -exec echo "::endgroup::" \;
-          || true
-
-  sytest:
-    if: ${{ !failure() && !cancelled() }}
-    needs: linting-done
-    runs-on: ubuntu-latest
-    container:
-      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
-      volumes:
-        - ${{ github.workspace }}:/src
-      env:
-        SYTEST_BRANCH: ${{ github.head_ref }}
-        POSTGRES: ${{ matrix.postgres && 1}}
-        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
-        WORKERS: ${{ matrix.workers && 1 }}
-        REDIS: ${{ matrix.redis && 1 }}
-        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
-        TOP: ${{ github.workspace }}
-
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - sytest-tag: focal
-
-          - sytest-tag: focal
-            postgres: postgres
-
-          - sytest-tag: testing
-            postgres: postgres
-
-          - sytest-tag: focal
-            postgres: multi-postgres
-            workers: workers
-
-          - sytest-tag: buster
-            postgres: multi-postgres
-            workers: workers
-
-          - sytest-tag: buster
-            postgres: postgres
-            workers: workers
-            redis: redis
-
-    steps:
-      - uses: actions/checkout@v2
-      - name: Prepare test blacklist
-        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
-      - name: Run SyTest
-        run: /bootstrap.sh synapse
-        working-directory: /src
-      - name: Summarise results.tap
-        if: ${{ always() }}
-        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
-      - name: Upload SyTest logs
-        uses: actions/upload-artifact@v2
-        if: ${{ always() }}
-        with:
-          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
-          path: |
-            /logs/results.tap
-            /logs/**/*.log*
-
-  export-data:
-    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
-    needs: [linting-done, portdb]
-    runs-on: ubuntu-latest
-    env:
-      TOP: ${{ github.workspace }}
-
-    services:
-      postgres:
-        image: postgres
-        ports:
-          - 5432:5432
-        env:
-          POSTGRES_PASSWORD: "postgres"
-          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-
-    steps:
-      - uses: actions/checkout@v2
-      - run: sudo apt-get -qq install xmlsec1
-      - uses: matrix-org/setup-python-poetry@v1
-        with:
-          python-version: ${{ matrix.python-version }}
-          extras: "postgres"
-      - run: .ci/scripts/test_export_data_command.sh
-
-  portdb:
-    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
-    needs: linting-done
-    runs-on: ubuntu-latest
-    env:
-      TOP: ${{ github.workspace }}
-    strategy:
-      matrix:
-        include:
-          - python-version: "3.7"
-            postgres-version: "10"
-
-          - python-version: "3.10"
-            postgres-version: "14"
-
-    services:
-      postgres:
-        image: postgres:${{ matrix.postgres-version }}
-        ports:
-          - 5432:5432
-        env:
-          POSTGRES_PASSWORD: "postgres"
-          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-
-    steps:
-      - uses: actions/checkout@v2
-      - run: sudo apt-get -qq install xmlsec1
-      - uses: matrix-org/setup-python-poetry@v1
-        with:
-          python-version: ${{ matrix.python-version }}
-          extras: "postgres"
-      - run: .ci/scripts/test_synapse_port_db.sh
 
   complement:
     if: "${{ !failure() && !cancelled() }}"
-    needs: linting-done
     runs-on: ubuntu-latest
 
     strategy:
       fail-fast: false
       matrix:
         include:
-          - arrangement: monolith
-            database: SQLite
-
-          - arrangement: monolith
-            database: Postgres
-
           - arrangement: workers
             database: Postgres
 
     steps:
-      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
-      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
-      - name: "Set Go Version"
-        run: |
-          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
-          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
-          # Add the Go path to the PATH: We need this so we can call gotestfmt
-          echo "~/go/bin" >> $GITHUB_PATH
-
-      - name: "Install Complement Dependencies"
-        run: |
-          sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
-          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
-
       - name: Run actions/checkout@v2 for synapse
         uses: actions/checkout@v2
         with:
           path: synapse
 
-      - name: "Install custom gotestfmt template"
-        run: |
-          mkdir .gotestfmt/github -p
-          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
-
-      # Attempt to check out the same branch of Complement as the PR. If it
-      # doesn't exist, fallback to HEAD.
-      - name: Checkout complement
-        run: synapse/.ci/scripts/checkout_complement.sh
+      - name: Prepare Complement's Prerequisites
+        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
       - run: |
           set -o pipefail
-          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+          synapse/scripts-dev/complement.sh --build-only
+          while :; do
+            POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} SYNAPSE_TEST_LOG_LEVEL=DEBUG COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -f -run TestMembersLocal -json 2>&1 | gotestfmt | tee /tmp/xxx
+            if grep FAIL /tmp/xxx; then
+              break
+            fi
+          done
 
         shell: bash
         name: Run Complement Tests
 
@@ -371,15 +48,6 @@ jobs:
   tests-done:
     if: ${{ always() }}
     needs:
-      - check-sampleconfig
-      - lint
-      - lint-crlf
-      - lint-newsfile
-      - trial
-      - trial-olddeps
-      - sytest
-      - export-data
-      - portdb
       - complement
     runs-on: ubuntu-latest
     steps:
27
.github/workflows/twisted_trunk.yml
vendored
@@ -114,25 +114,14 @@ jobs:
             database: Postgres
 
     steps:
-      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
-      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
-      - name: "Set Go Version"
-        run: |
-          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
-          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
-          # Add the Go path to the PATH: We need this so we can call gotestfmt
-          echo "~/go/bin" >> $GITHUB_PATH
-
-      - name: "Install Complement Dependencies"
-        run: |
-          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
-          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
-
       - name: Run actions/checkout@v2 for synapse
         uses: actions/checkout@v2
         with:
           path: synapse
 
+      - name: Prepare Complement's Prerequisites
+        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
+
       # This step is specific to the 'Twisted trunk' test run:
       - name: Patch dependencies
         run: |
@@ -146,16 +135,6 @@ jobs:
           # NOT IN 1.1.12 poetry lock --check
         working-directory: synapse
 
-      - name: "Install custom gotestfmt template"
-        run: |
-          mkdir .gotestfmt/github -p
-          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
-
-      # Attempt to check out the same branch of Complement as the PR. If it
-      # doesn't exist, fallback to HEAD.
-      - name: Checkout complement
-        run: synapse/.ci/scripts/checkout_complement.sh
-
       - run: |
           set -o pipefail
           TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
1
changelog.d/13157.misc
Normal file
@@ -0,0 +1 @@
+Enable Complement testing in the 'Twisted Trunk' CI runs.
1
changelog.d/13158.misc
Normal file
@@ -0,0 +1 @@
+Add support to `complement.sh` for skipping the docker build.
@@ -44,15 +44,20 @@ usage() {
 Usage: $0 [-f] <go test arguments>...
 
 Run the complement test suite on Synapse.
 
-  -f    Skip rebuilding the docker images, and just use the most recent
+  -f, --fast
+        Skip rebuilding the docker images, and just use the most recent
         'complement-synapse:latest' image
 
+  --build-only
+        Only build the Docker images. Don't actually run Complement.
+
 For help on arguments to 'go test', run 'go help testflag'.
 EOF
 }
 
 # parse our arguments
 skip_docker_build=""
+skip_complement_run=""
 while [ $# -ge 1 ]; do
     arg=$1
     case "$arg" in
@@ -60,9 +65,12 @@ while [ $# -ge 1 ]; do
             usage
             exit 1
             ;;
-        "-f")
+        "-f"|"--fast")
             skip_docker_build=1
             ;;
+        "--build-only")
+            skip_complement_run=1
+            ;;
         *)
             # unknown arg: presumably an argument to gotest. break the loop.
             break
@@ -106,6 +114,11 @@ if [ -z "$skip_docker_build" ]; then
     echo_if_github "::endgroup::"
 fi
 
+if [ -n "$skip_complement_run" ]; then
+    echo "Skipping Complement run as requested."
+    exit
+fi
+
 export COMPLEMENT_BASE_IMAGE=complement-synapse
 
 extra_test_args=()
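Together, `--build-only` and `-f`/`--fast` let `scripts-dev/complement.sh` split building the Docker images from running the tests, which is exactly how the workflow above uses it. A hedged usage sketch, run from the directory that contains the `synapse` checkout (as the CI job does), with `COMPLEMENT_DIR` pointing at a Complement checkout:

```sh
# Build the complement-synapse Docker images once, without running any tests.
synapse/scripts-dev/complement.sh --build-only

# Subsequent runs reuse the images just built (-f / --fast skips the rebuild);
# trailing arguments are passed on to 'go test', e.g. -run to select tests.
COMPLEMENT_DIR=$(pwd)/complement synapse/scripts-dev/complement.sh -f -run TestMembersLocal
```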
@@ -476,7 +476,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
 
         return results_dict.get("membership"), results_dict.get("event_id")
 
-    @cached(max_entries=500000, iterable=True, prune_unread_entries=False)
+    @cached(max_entries=500000, iterable=True, prune_unread_entries=False, debug_invalidations=True)
     async def get_rooms_for_user_with_stream_ordering(
         self, user_id: str
     ) -> FrozenSet[GetRoomsForUserWithStreamOrdering]:
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import enum
+import logging
 import threading
 from typing import (
     Callable,
@@ -66,6 +67,7 @@ class DeferredCache(Generic[KT, VT]):
         "cache",
         "thread",
         "_pending_deferred_cache",
+        "debug_invalidations"
     )
 
     def __init__(
@@ -76,6 +78,7 @@ class DeferredCache(Generic[KT, VT]):
         iterable: bool = False,
         apply_cache_factor_from_config: bool = True,
         prune_unread_entries: bool = True,
+        debug_invalidations: bool = False,
     ):
         """
         Args:
@@ -119,6 +122,7 @@ class DeferredCache(Generic[KT, VT]):
         )
 
         self.thread: Optional[threading.Thread] = None
+        self.debug_invalidations = debug_invalidations
 
     @property
     def max_entries(self) -> int:
@@ -310,6 +314,9 @@ class DeferredCache(Generic[KT, VT]):
         self.check_thread()
         self.cache.del_multi(key)
 
+        if self.debug_invalidations:
+            logging.debug("Invalidating key %r in cache", key)
+
         # if we have a pending lookup for this key, remove it from the
         # _pending_deferred_cache, which will (a) stop it being returned
         # for future queries and (b) stop it being persisted as a proper entry
@@ -326,6 +333,8 @@ class DeferredCache(Generic[KT, VT]):
                 entry.invalidate()
 
     def invalidate_all(self) -> None:
+        if self.debug_invalidations:
+            logging.debug("Invalidating ALL keys")
         self.check_thread()
         self.cache.clear()
         for entry in self._pending_deferred_cache.values():
@@ -301,6 +301,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
         cache_context: bool = False,
         iterable: bool = False,
         prune_unread_entries: bool = True,
+        debug_invalidations: bool = False
     ):
         super().__init__(
             orig,
@@ -318,6 +319,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
         self.tree = tree
         self.iterable = iterable
         self.prune_unread_entries = prune_unread_entries
+        self.debug_invalidations = debug_invalidations
 
     def __get__(self, obj: Optional[Any], owner: Optional[Type]) -> Callable[..., Any]:
         cache: DeferredCache[CacheKey, Any] = DeferredCache(
@@ -326,6 +328,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
             tree=self.tree,
             iterable=self.iterable,
             prune_unread_entries=self.prune_unread_entries,
+            debug_invalidations=self.debug_invalidations,
         )
 
         get_cache_key = self.cache_key_builder
@@ -577,6 +580,7 @@ def cached(
     cache_context: bool = False,
     iterable: bool = False,
     prune_unread_entries: bool = True,
+    debug_invalidations: bool = False,
 ) -> Callable[[F], _CachedFunction[F]]:
     func = lambda orig: DeferredCacheDescriptor(
         orig,
@@ -587,6 +591,7 @@ def cached(
         cache_context=cache_context,
         iterable=iterable,
         prune_unread_entries=prune_unread_entries,
+        debug_invalidations=debug_invalidations
     )
 
     return cast(Callable[[F], _CachedFunction[F]], func)
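The new `debug_invalidations` flag is plumbed from `@cached` through `DeferredCacheDescriptor` into `DeferredCache`, where it gates the extra `logging.debug` calls shown above; on this branch it is switched on only for `get_rooms_for_user_with_stream_ordering`. A hedged sketch of one way to surface those lines when running a single trial test locally, assuming the test module name below and that trial's captured logs land under `_trial_temp/` (the same directory the workflow's log-dump step searches):

```sh
# Run one storage test with verbose logging (SYNAPSE_TEST_LOG_LEVEL is the same
# knob the debugging workflow above sets to DEBUG), then pull the cache
# invalidation lines out of trial's captured log files.
# Note: the test module name is an assumption for illustration.
SYNAPSE_TEST_LOG_LEVEL=DEBUG poetry run trial tests.storage.test_roommember
grep -rn "Invalidating" _trial_temp/
```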