Mirror of https://github.com/element-hq/synapse.git, synced 2025-12-17 02:10:27 +00:00

Compare commits: v1.62.0rc3 ... erikj/test (1 commit)

Commit: aa2fe082ae
.buildkite/.env (new file, 13 lines)
@@ -0,0 +1,13 @@
CI
BUILDKITE
BUILDKITE_BUILD_NUMBER
BUILDKITE_BRANCH
BUILDKITE_BUILD_NUMBER
BUILDKITE_JOB_ID
BUILDKITE_BUILD_URL
BUILDKITE_PROJECT_SLUG
BUILDKITE_COMMIT
BUILDKITE_PULL_REQUEST
BUILDKITE_TAG
CODECOV_TOKEN
TRIAL_FLAGS
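This file is an allowlist of variables to forward from the Buildkite host environment into the job containers; env files whose lines carry only a variable name inherit the value from the calling environment. A minimal sketch of that mechanism (the image name is illustrative, not taken from this diff):

    # each name-only line inherits its value from the current environment
    docker run --env-file .buildkite/.env matrixdotorg/sytest-synapse:py35 env | grep BUILDKITE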
.buildkite/merge_base_branch.sh (new executable file, 35 lines)
@@ -0,0 +1,35 @@
#!/usr/bin/env bash

set -e

if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
    echo "Not merging forward, as this is a release branch"
    exit 0
fi

if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
    echo "Not a pull request, or hasn't had a PR opened yet..."

    # It probably hasn't had a PR opened yet. Since all PRs land on develop, we
    # can probably assume it's based on it and will be merged into it.
    GITBASE="develop"
else
    # Get the reference, using the GitHub API
    GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi

echo "--- merge_base_branch $GITBASE"

# Show what we are before
git --no-pager show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit --no-commit origin/$GITBASE

# Show what we are after.
git --no-pager show -s
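The branch logic can be exercised locally by faking the Buildkite variables; a sketch (values illustrative):

    # a feature branch with an open PR against develop: fetches and merges origin/develop
    BUILDKITE_BRANCH="erikj/test" \
    BUILDKITE_PULL_REQUEST_BASE_BRANCH="develop" \
        .buildkite/merge_base_branch.sh

    # a release branch: matches ^(develop|master|dinsic|shhs|release-.*)$ and exits early
    BUILDKITE_BRANCH="release-v1.62" .buildkite/merge_base_branch.sh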
.buildkite/postgres-config.yaml (modified)
@@ -3,7 +3,7 @@
 # CI's Docker setup at the point where this file is considered.
 server_name: "localhost:8800"
 
-signing_key_path: ".ci/test.signing.key"
+signing_key_path: "/src/.buildkite/test.signing.key"
 
 report_stats: false
 
@@ -11,9 +11,11 @@ database:
   name: "psycopg2"
   args:
     user: postgres
-    host: localhost
+    host: postgres
     password: postgres
     database: synapse
 
 # Suppress the key server warning.
-trusted_key_servers: []
+trusted_key_servers:
+  - server_name: "matrix.org"
+    suppress_key_server_warning: true
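The host change from localhost to postgres means the job now reaches the database container by its network alias rather than a port on the local host. A minimal sketch of the assumed Docker setup (network name and image tag are illustrative):

    docker network create ci
    docker run -d --network ci --name postgres \
        -e POSTGRES_PASSWORD=postgres postgres:12
    # inside any other container on the "ci" network, "postgres" resolves as a hostname:
    docker run --rm --network ci postgres:12 pg_isready -h postgres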
.buildkite/scripts/create_postgres_db.py (new executable file, 37 lines)
@@ -0,0 +1,37 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from synapse.storage.engines import create_engine

logger = logging.getLogger("create_postgres_db")

if __name__ == "__main__":
    # Create a PostgresEngine.
    db_engine = create_engine({"name": "psycopg2", "args": {}})

    # Connect to postgres to create the base database.
    # We use "postgres" as a database because it's bound to exist and the "synapse" one
    # doesn't exist yet.
    db_conn = db_engine.module.connect(
        user="postgres", host="postgres", password="postgres", dbname="postgres"
    )
    db_conn.autocommit = True
    cur = db_conn.cursor()
    cur.execute("CREATE DATABASE synapse;")
    cur.close()
    db_conn.close()
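Setting db_conn.autocommit = True is load-bearing here: PostgreSQL refuses to run CREATE DATABASE inside a transaction block, and psycopg2 otherwise opens one implicitly on the first execute. Where the postgres client tools are available, the whole script boils down to a one-liner; a sketch:

    # equivalent psql invocation, assuming the client is installed in the job image
    PGPASSWORD=postgres psql -h postgres -U postgres -d postgres -c "CREATE DATABASE synapse;"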
.buildkite/scripts/setup_old_deps.sh (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/bin/bash

# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.

set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox

export LANG="C.UTF-8"
.buildkite/scripts/test_synapse_port_db.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/bin/bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
# driver), updates the schema of the test SQLite database, runs background updates on it,
# creates an empty test database in PostgreSQL, then runs the 'synapse_port_db' script to
# test porting the SQLite database to the PostgreSQL database (with coverage).

set -xe
cd `dirname $0`/../..

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml

echo "--- Prepare the databases"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml

# Create the PostgreSQL database.
./.buildkite/scripts/create_postgres_db.py

echo "+++ Run synapse_port_db"

# Run the script
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
.buildkite/sqlite-config.yaml (modified)
@@ -3,14 +3,16 @@
 # schema and run background updates on it.
 server_name: "localhost:8800"
 
-signing_key_path: ".ci/test.signing.key"
+signing_key_path: "/src/.buildkite/test.signing.key"
 
 report_stats: false
 
 database:
   name: "sqlite3"
   args:
-    database: ".ci/test_db.db"
+    database: ".buildkite/test_db.db"
 
 # Suppress the key server warning.
-trusted_key_servers: []
+trusted_key_servers:
+  - server_name: "matrix.org"
+    suppress_key_server_warning: true
.buildkite/worker-blacklist (new file, 10 lines)
@@ -0,0 +1,10 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.

Can re-join room if re-invited

# new failures as of https://github.com/matrix-org/sytest/pull/732
Device list doesn't change if remote server is down

# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
Server correctly handles incoming m.device_list_update
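The latest_deps workflow later in this diff shows how such blacklists are consumed: the per-mode lists are concatenated and handed to SyTest via the BLACKLIST variable, roughly:

    cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
    BLACKLIST=synapse-blacklist-with-workers /bootstrap.sh synapse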
.ci/complement_package.gotpl (deleted, 93 lines)
@@ -1,93 +0,0 @@
{{- /*gotype: github.com/haveyoudebuggedit/gotestfmt/parser.Package*/ -}}
{{- /*
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
we are creating a stylized header for each package.

This template is based on https://github.com/haveyoudebuggedit/gotestfmt/blob/f179b0e462a9dcf7101515d87eec4e4d7e58b92a/.gotestfmt/github/package.gotpl
which is under the Unlicense licence.
*/ -}}
{{- $settings := .Settings -}}
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
    {{- if eq .Result "PASS" -}}
        {{ "\033" }}[0;32m
    {{- else if eq .Result "SKIP" -}}
        {{ "\033" }}[0;33m
    {{- else -}}
        {{ "\033" }}[0;31m
    {{- end -}}
    📦 {{ .Name }}{{- "\033" }}[0m
    {{- with .Coverage -}}
        {{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
    {{- end -}}
    {{- "\n" -}}
    {{- with .Reason -}}
        {{- " " -}}🛑 {{ . -}}{{- "\n" -}}
    {{- end -}}
    {{- with .Output -}}
        {{- . -}}{{- "\n" -}}
    {{- end -}}
    {{- with .TestCases -}}
        {{- /* Failing tests are first */ -}}
        {{- range . -}}
            {{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
                ::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
                {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
                {{- with .Coverage -}}
                    , coverage: {{ . }}%
                {{- end -}})
                {{- "\033" -}}[0m
                {{- "\n" -}}

                {{- with .Output -}}
                    {{- formatTestOutput . $settings -}}
                    {{- "\n" -}}
                {{- end -}}

                ::endgroup::{{- "\n" -}}
            {{- end -}}
        {{- end -}}


        {{- /* Then skipped tests are second */ -}}
        {{- range . -}}
            {{- if eq .Result "SKIP" -}}
                ::group::{{ "\033" }}[0;33m🚧{{ " " }}{{- .Name -}}
                {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
                {{- with .Coverage -}}
                    , coverage: {{ . }}%
                {{- end -}})
                {{- "\033" -}}[0m
                {{- "\n" -}}

                {{- with .Output -}}
                    {{- formatTestOutput . $settings -}}
                    {{- "\n" -}}
                {{- end -}}

                ::endgroup::{{- "\n" -}}
            {{- end -}}
        {{- end -}}


        {{- /* Then passing tests are last */ -}}
        {{- range . -}}
            {{- if eq .Result "PASS" -}}
                ::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
                {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
                {{- with .Coverage -}}
                    , coverage: {{ . }}%
                {{- end -}})
                {{- "\033" -}}[0m
                {{- "\n" -}}

                {{- with .Output -}}
                    {{- formatTestOutput . $settings -}}
                    {{- "\n" -}}
                {{- end -}}

                ::endgroup::{{- "\n" -}}
            {{- end -}}
        {{- end -}}
    {{- end -}}
    {{- "\n" -}}
{{- end -}}
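The ::group:: / ::endgroup:: markers the template emits are GitHub Actions workflow commands that fold the enclosed log lines into a collapsible section; the effect can be reproduced in any workflow step (the test name is illustrative):

    echo "::group::TestRoomState"   # illustrative test name
    echo "verbose per-test output, collapsed by default in the Actions log viewer"
    echo "::endgroup::"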
.ci/latest_deps_build_failed_issue_template.md (deleted, 4 lines)
@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
.ci/scripts/checkout_complement.sh (deleted, 25 lines)
@@ -1,25 +0,0 @@
#!/bin/bash
#
# Fetches a version of complement which best matches the current build.
#
# The tarball is unpacked into `./complement`.

set -e
mkdir -p complement

# Pick an appropriate version of complement. Depending on whether this is a PR or release,
# etc. we need to use different fallbacks:
#
#   1. First check if there's a similarly named branch (GITHUB_HEAD_REF
#      for pull requests, otherwise GITHUB_REF).
#   2. Attempt to use the base branch, e.g. when merging into release-vX.Y
#      (GITHUB_BASE_REF for pull requests).
#   3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
    # Skip empty branch names and merge commits.
    if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
        continue
    fi

    (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
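The loop leans on the ${GITHUB_REF#refs/heads/} expansion, which strips the ref prefix so the branch name can be tried as a complement tarball name; for example (values illustrative):

    GITHUB_REF="refs/heads/release-v1.62"
    echo "${GITHUB_REF#refs/heads/}"   # prints: release-v1.62
    GITHUB_REF="refs/pull/1234/merge"
    echo "${GITHUB_REF#refs/heads/}"   # no match, printed unchanged -- then skipped by the refs/pull guard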
.ci/scripts/postgres_exec.py (deleted, 31 lines)
@@ -1,31 +0,0 @@
#!/usr/bin/env python
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import psycopg2

# a very simple replacement for `psql`, to make up for the lack of the postgres client
# libraries in the synapse docker image.

# We use "postgres" as a database because it's bound to exist and the "synapse" one
# doesn't exist yet.
db_conn = psycopg2.connect(
    user="postgres", host="localhost", password="postgres", dbname="postgres"
)
db_conn.autocommit = True
cur = db_conn.cursor()
for c in sys.argv[1:]:
    cur.execute(c)
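Usage mirrors psql -c, one SQL statement per argument; the port-db script below invokes it like this:

    poetry run .ci/scripts/postgres_exec.py \
        "DROP DATABASE synapse" \
        "CREATE DATABASE synapse"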
.ci/scripts/test_export_data_command.sh (deleted, 52 lines)
@@ -1,52 +0,0 @@
#!/usr/bin/env bash

# Test for the export-data admin command against sqlite and postgres

# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
dir="/tmp/export_data/rooms"
if [ -d "$dir" ]; then
    echo "Command successful, this test passes"
else
    echo "No output directories found, the command fails against a sqlite database."
    exit 1
fi

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite database to postgres so we can check the command works against postgres
echo "+++ Port SQLite3 database to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
dir2="/tmp/export_data2/rooms"
if [ -d "$dir2" ]; then
    echo "Command successful, this test passes"
else
    echo "No output directories found, the command fails against a postgres database."
    exit 1
fi
.ci/scripts/test_old_deps.sh (deleted, 83 lines)
@@ -1,83 +0,0 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive

set -ex

apt-get update
apt-get install -y \
        python3 python3-dev python3-pip python3-venv pipx \
        libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
   -e "s/[~>]=/==/g" \
   -e '/^python = "^/!s/\^/==/g' \
   -e "/psycopg2/d" \
   -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
   -e '/systemd/d' \
   pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far as the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
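The effect of the bound-rewriting sed expressions on sample dependency lines, sketched (the package pins are illustrative):

    echo 'attrs = ">=19.2.0"'  | sed -e "s/[~>]=/==/g"              # attrs = "==19.2.0"
    echo 'jsonschema = "^3.4"' | sed -e '/^python = "^/!s/\^/==/g'  # jsonschema = "==3.4"
    echo 'python = "^3.7"'     | sed -e '/^python = "^/!s/\^/==/g'  # left untouched by the address guard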
.ci/scripts/test_synapse_port_db.sh (deleted, 53 lines)
@@ -1,53 +0,0 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

#####

# Now do the same again, on an empty database.

echo "--- Prepare empty SQLite database"

# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db

poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py \
    "DROP DATABASE synapse" \
    "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
.ci/twisted_trunk_build_failed_issue_template.md (deleted, 4 lines)
@@ -1,4 +0,0 @@
---
title: CI run against Twisted trunk is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
.ci/worker-blacklist (deleted, 2 lines)
@@ -1,2 +0,0 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.
.circleci/config.yml (new file, 78 lines)
@@ -0,0 +1,78 @@
version: 2.1
jobs:
  dockerhubuploadrelease:
    docker:
      - image: docker:git
    steps:
      - checkout
      - docker_prepare
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      # for release builds, we want to get the amd64 image out asap, so first
      # we do an amd64-only build, before following up with a multiarch build.
      - docker_build:
          tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
          platforms: linux/amd64
      - docker_build:
          tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
          platforms: linux/amd64,linux/arm/v7,linux/arm64

  dockerhubuploadlatest:
    docker:
      - image: docker:git
    steps:
      - checkout
      - docker_prepare
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      # for `latest`, we don't want the arm images to disappear, so don't update the tag
      # until all of the platforms are built.
      - docker_build:
          tag: -t matrixdotorg/synapse:latest
          platforms: linux/amd64,linux/arm/v7,linux/arm64

workflows:
  build:
    jobs:
      - dockerhubuploadrelease:
          filters:
            tags:
              only: /v[0-9].[0-9]+.[0-9]+.*/
            branches:
              ignore: /.*/
      - dockerhubuploadlatest:
          filters:
            branches:
              only: master

commands:
  docker_prepare:
    description: Sets up a remote docker server, downloads the buildx cli plugin, and enables multiarch images
    parameters:
      buildx_version:
        type: string
        default: "v0.4.1"
    steps:
      - setup_remote_docker:
          # 19.03.13 was the most recent available on circleci at the time of
          # writing.
          version: 19.03.13
      - run: apk add --no-cache curl
      - run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
      - run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
      - run: chmod a+x ~/.docker/cli-plugins/docker-buildx
      # install qemu links in /proc/sys/fs/binfmt_misc on the docker instance running the circleci job
      - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
      # create a context named `builder` for the builds
      - run: docker context create builder
      # create a buildx builder using the new context, and set it as the default
      - run: docker buildx create builder --use

  docker_build:
    description: Builds and pushes images to dockerhub using buildx
    parameters:
      platforms:
        type: string
        default: linux/amd64
      tag:
        type: string
    steps:
      - run: docker buildx build -f docker/Dockerfile --push --platform << parameters.platforms >> --label gitsha1=${CIRCLE_SHA1} << parameters.tag >> --progress=plain .
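With the << parameters >> substituted, the amd64-only release build therefore runs roughly the following (the version tag is illustrative):

    docker buildx build -f docker/Dockerfile --push \
        --platform linux/amd64 \
        --label gitsha1=$CIRCLE_SHA1 \
        -t matrixdotorg/synapse:v1.0.0 --progress=plain .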
.dockerignore (modified)
@@ -3,9 +3,11 @@
 
 # things to include
 !docker
+!scripts
 !synapse
+!MANIFEST.in
 !README.rst
-!pyproject.toml
-!poetry.lock
+!setup.py
+!synctl
 
 **/__pycache__
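The hunk starts at line 3, so the top of the file is not shown; a .dockerignore in this allowlist style presumably opens by excluding everything and then re-including what the build needs, along the lines of:

    # assumed opening (not shown in the hunk): exclude everything by default
    *

    # things to include
    !docker
    !synapse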
.flake8 (deleted, 11 lines)
@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
#  W503: line break before binary operator
#  W504: line break after binary operator
#  E203: whitespace before ':' (which is contrary to pep8?)
#  E731: do not assign a lambda expression, use a def
#  E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
.git-blame-ignore-revs (deleted, 11 lines)
@@ -1,11 +0,0 @@
# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3

# Target Python 3.5 with black (#8664).
aff1eb7c671b0a3813407321d2702ec46c71fa56

# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1

# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
.github/CODEOWNERS (deleted, vendored, 2 lines)
@@ -1,2 +0,0 @@
# Automatically request reviews from the synapse-core team when a pull request comes in.
* @matrix-org/synapse-core
.github/ISSUE_TEMPLATE/BUG_REPORT.md (new file, vendored, 72 lines)
@@ -0,0 +1,72 @@
---
name: Bug report
about: Create a report to help us improve

---

<!--

**THIS IS NOT A SUPPORT CHANNEL!**
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)

If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/

This is a bug report template. By following the instructions below and
filling out the sections with your information, you will help us to get all
the necessary data to fix your issue.

You can also preview your report before submitting it. You may remove sections
that aren't relevant to your particular case.

Text between <!-- and --> marks will be invisible in the report.

-->

### Description

<!-- Describe here the problem that you are experiencing -->

### Steps to reproduce

- list the steps
- that reproduce the bug
- using hyphens as bullet points

<!--
Describe how what happens differs from what you expected.

If you can identify any relevant log snippets from _homeserver.log_, please include
those (please be careful to remove any personal or private data). Please surround them with
``` (three backticks, on a line on their own), so that they are formatted legibly.
-->

### Version information

<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->

<!-- Was this issue identified on matrix.org or another homeserver? -->
- **Homeserver**:

If not matrix.org:

<!--
What version of Synapse is running?

You can find the Synapse version with this command:

$ curl http://localhost:8008/_synapse/admin/v1/server_version

(You may need to replace `localhost:8008` if Synapse is not configured to
listen on that port.)
-->
- **Version**:

- **Install method**:
<!-- examples: package manager/git clone/pip -->

- **Platform**:
<!--
Tell us about the environment in which your homeserver is operating
distro, hardware, if it's running in a vm/container, etc.
-->
.github/ISSUE_TEMPLATE/BUG_REPORT.yml (deleted, vendored, 103 lines)
@@ -1,103 +0,0 @@
name: Bug report
description: Create a report to help us improve
body:
  - type: markdown
    attributes:
      value: |
        **THIS IS NOT A SUPPORT CHANNEL!**
        **IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).

        If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/

        This is a bug report form. By following the instructions below and completing the sections with your information, you will help us to get all the necessary data to fix your issue.

        You can also preview your report before submitting it.
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Describe the problem that you are experiencing
    validations:
      required: true
  - type: textarea
    id: reproduction_steps
    attributes:
      label: Steps to reproduce
      description: |
        Describe the series of steps that leads you to the problem.

        Describe how what happens differs from what you expected.
      placeholder: Tell us what you see!
      value: |
        - list the steps
        - that reproduce the bug
        - using hyphens as bullet points
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        ---

        **IMPORTANT**: please answer the following questions, to help us narrow down the problem.
  - type: input
    id: homeserver
    attributes:
      label: Homeserver
      description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Synapse Version
      description: |
        What version of Synapse is this homeserver running?

        You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version

        or with this command:

        ```
        $ curl http://localhost:8008/_synapse/admin/v1/server_version
        ```

        (You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
    validations:
      required: true
  - type: dropdown
    id: install_method
    attributes:
      label: Installation Method
      options:
        - Docker (matrixdotorg/synapse)
        - Debian packages from packages.matrix.org
        - pip (from PyPI)
        - Other (please mention below)
  - type: textarea
    id: platform
    attributes:
      label: Platform
      description: |
        Tell us about the environment in which your homeserver is operating...
        e.g. distro, hardware, if it's running in a vm/container, etc.
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
        This will be automatically formatted into code, so there is no need for backticks.

        Please be careful to remove any personal or private data.

        **Bug reports are usually very difficult to diagnose without logging.**
      render: shell
    validations:
      required: true
  - type: textarea
    id: anything_else
    attributes:
      label: Anything else that would be useful to know?
.github/PULL_REQUEST_TEMPLATE.md (modified, vendored)
@@ -1,14 +1,12 @@
 ### Pull Request Checklist
 
-<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
+<!-- Please read CONTRIBUTING.md before submitting your pull request -->
 
 * [ ] Pull request is based on the develop branch
-* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
+* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
   - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
   - Use markdown where necessary, mostly for `code blocks`.
   - End with either a period (.) or an exclamation mark (!).
   - Start with a capital letter.
-  - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
-* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
-  (run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
+* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
.github/workflows/docker.yml (deleted, vendored, 57 lines)
@@ -1,57 +0,0 @@
# GitHub actions workflow which builds and publishes the docker images.

name: Build docker images

on:
  push:
    tags: ["v*"]
    branches: [ master, main, develop ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v1
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1

      - name: Inspect builder
        run: docker buildx inspect

      - name: Log in to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Calculate docker image tag
        id: set-tag
        uses: docker/metadata-action@master
        with:
          images: matrixdotorg/synapse
          flavor: |
            latest=false
          tags: |
            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
            type=pep440,pattern={{raw}}

      - name: Build and push all platforms
        uses: docker/build-push-action@v2
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "${{ steps.set-tag.outputs.tags }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64
.github/workflows/docs.yaml (deleted, vendored, 65 lines)
@@ -1,65 +0,0 @@
name: Deploy the documentation

on:
  push:
    branches:
      # For bleeding-edge documentation
      - develop
      # For documentation specific to a release
      - 'release-v*'
      # stable docs
      - master

  workflow_dispatch:

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
        with:
          mdbook-version: '0.4.17'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      # Figure out the target directory.
      #
      # The target directory depends on the name of the branch
      #
      - name: Get the target directory name
        id: vars
        run: |
          # first strip the 'refs/heads/' prefix with some shell foo
          branch="${GITHUB_REF#refs/heads/}"

          case $branch in
              release-*)
                  # strip 'release-' from the name for release branches.
                  branch="${branch#release-}"
                  ;;
              master)
                  # deploy to "latest" for the master branch.
                  branch="latest"
                  ;;
          esac

          # finally, set the 'branch-version' var.
          echo "::set-output name=branch-version::$branch"

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
          destination_dir: ./${{ steps.vars.outputs.branch-version }}
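So the case statement maps pushed branches to publish directories as follows (branch names illustrative):

    # refs/heads/develop        -> ./develop   (bleeding-edge docs)
    # refs/heads/release-v1.62  -> ./v1.62     ('release-' prefix stripped)
    # refs/heads/master         -> ./latest    (stable docs)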
.github/workflows/latest_deps.yml (deleted, vendored, 159 lines)
@@ -1,159 +0,0 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
  schedule:
    - cron: 0 7 * * *
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.2.0b1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
      # `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --no-dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - database: "sqlite"
          - database: "postgres"
            postgres-version: "14"

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true


  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
      volumes:
        - ${{ github.workspace }}:/src
    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres
            workers: workers
            redis: redis
    env:
      POSTGRES: ${{ matrix.postgres && 1}}
      WORKERS: ${{ matrix.workers && 1 }}
      REDIS: ${{ matrix.redis && 1 }}
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@v2
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
        working-directory: /src
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*


  # TODO: run complement (as with twisted trunk, see #12473).

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
    needs:
      # TODO: should mypy be included here? It feels more brittle than the other two.
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
.github/workflows/release-artifacts.yml (deleted, vendored, 121 lines)
@@ -1,121 +0,0 @@
# GitHub actions workflow which builds the release artifacts.

name: Build release artifacts

on:
  # we build on PRs and develop to (hopefully) get early warning
  # of things breaking (but only build one set of debs)
  pull_request:
  push:
    branches: ["develop", "release-*"]

    # we do the full build on tags.
    tags: ["v*"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  get-distros:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
              dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "::set-output name=distros::$dists"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}

  # now build the packages with a matrix build.
  build-debs:
    needs: get-distros
    name: "Build .deb packages"
    runs-on: ubuntu-latest
    strategy:
      matrix:
        distro: ${{ fromJson(needs.get-distros.outputs.distros) }}

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@v2

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
          ./src/scripts-dev/build_debian_packages.py \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
            --docker-build-arg=--load \
            "${{ matrix.distro }}"
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@v2
        with:
          name: debs
          path: debs/*

  build-sdist:
    name: "Build pypi distribution files"
    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
    name: "Attach assets to release"
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@v2
      - name: Build a tarball for the debs
        run: tar -cvJf debs.tar.xz debs
      - name: Attach to release
        uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          files: |
            Sdist/*
            Wheel/*
            debs.tar.xz
          # if it's not already published, keep the release as a draft.
          draft: true
          # mark it as a prerelease if the tag contains 'rc'.
          prerelease: ${{ contains(github.ref, 'rc') }}
435
.github/workflows/tests.yml
vendored
435
.github/workflows/tests.yml
vendored
@@ -1,435 +0,0 @@
name: Tests

on:
  push:
    branches: ["develop", "release-*"]
  pull_request:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  check-sampleconfig:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install .
      - run: scripts-dev/generate_sample_config.sh --check
      - run: scripts-dev/config-lint.sh

  check-schema-delta:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
      - run: scripts-dev/check_schema_delta.py --force-colors

  lint:
    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
    with:
      typechecking-extras: "all"

  lint-crlf:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

  lint-newsfile:
    if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: actions/setup-python@v2
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.number }}

  # Dummy step to gate other tests on without repeating the whole list
  linting-done:
    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
    needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
    runs-on: ubuntu-latest
    steps:
      - run: "true"

  trial:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        database: ["sqlite"]
        extras: ["all"]
        include:
          # Newest Python without optional deps
          - python-version: "3.10"
            extras: ""

          # Oldest Python with PostgreSQL
          - python-version: "3.7"
            database: "postgres"
            postgres-version: "10"
            extras: "all"

          # Newest Python with newest PostgreSQL
          - python-version: "3.10"
            database: "postgres"
            postgres-version: "14"
            extras: "all"

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: ${{ matrix.extras }}
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: poetry run trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-olddeps:
    # Note: sqlite only; no postgres
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Test with old deps
        uses: docker://ubuntu:focal # For old python and sqlite
        # Note: focal seems to be using 3.8, but the oldest is 3.7?
        # See https://github.com/matrix-org/synapse/issues/12343
        with:
          workdir: /github/workspace
          entrypoint: .ci/scripts/test_old_deps.sh
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'
    # Note: sqlite only; no postgres. Completely untested since poetry move.
    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["pypy-3.7"]
        extras: ["all"]

    steps:
      - uses: actions/checkout@v2
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: ${{ matrix.extras }}
      - run: poetry run trial --jobs=2 tests
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    if: ${{ !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.postgres && 1 }}
        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1 }}
        WORKERS: ${{ matrix.workers && 1 }}
        REDIS: ${{ matrix.redis && 1 }}
        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres

          - sytest-tag: testing
            postgres: postgres

          - sytest-tag: focal
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: postgres
            workers: workers
            redis: redis

    steps:
      - uses: actions/checkout@v2
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  export-data:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: [linting-done, portdb]
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}

    services:
      postgres:
        image: postgres
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_export_data_command.sh

  portdb:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}
    strategy:
      matrix:
        include:
          - python-version: "3.7"
            postgres-version: "10"

          - python-version: "3.10"
            postgres-version: "14"

    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh

  complement:
    if: "${{ !failure() && !cancelled() }}"
    needs: linting-done
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

    steps:
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      - name: "Install custom gotestfmt template"
        run: |
          mkdir .gotestfmt/github -p
          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl

      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fallback to HEAD.
      - name: Checkout complement
        run: synapse/.ci/scripts/checkout_complement.sh

      - run: |
          set -o pipefail
          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

  # We only run the workers tests on `develop` for now, because they're too slow to wait for on PRs.
  # Sadly, you can't have an `if` condition on the value of a matrix, so this is a temporary, separate job for now.
  # GitHub Actions doesn't support YAML anchors, so it's full-on duplication for now.
  complement-developonly:
    if: "${{ !failure() && !cancelled() && (github.ref == 'refs/heads/develop') }}"
    needs: linting-done
    runs-on: ubuntu-latest

    name: "Complement Workers (develop only)"

    steps:
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      - name: "Install custom gotestfmt template"
        run: |
          mkdir .gotestfmt/github -p
          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl

      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fallback to HEAD.
      - name: Checkout complement
        run: synapse/.ci/scripts/checkout_complement.sh

      - run: |
          set -o pipefail
          WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

  # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - check-sampleconfig
      - lint
      - lint-crlf
      - lint-newsfile
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@v2
        with:
          needs: ${{ toJSON(needs) }}

          # The newsfile lint may be skipped on non PR builds
          skippable:
            lint-newsfile
116 .github/workflows/twisted_trunk.yml vendored
@@ -1,116 +0,0 @@
name: Twisted Trunk

on:
  schedule:
    - cron: 0 8 * * *

  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  mypy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
      - run: poetry run mypy

  trial:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all test"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - run: poetry run trial --jobs 2 tests

      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:buster
      volumes:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@v2
      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        #       but the sytest-synapse container expects it to be in /venv/.
        #       We symlink it before running poetry so that poetry actually
        #       ends up installing to `/venv`.
        run: |
          ln -s -T /venv /src/.venv
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
        env:
          # Use offline mode to avoid reinstalling the pinned version of
          # twisted.
          OFFLINE: 1
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
    needs:
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/twisted_trunk_build_failed_issue_template.md
17 .gitignore vendored
@@ -6,17 +6,13 @@
 *.egg
 *.egg-info
 *.lock
-*.py[cod]
+*.pyc
 *.snap
 *.tac
 _trial_temp/
 _trial_temp*/
 /out
 .DS_Store
-__pycache__/
 
-# We do want the poetry lockfile.
-!poetry.lock
-
 # stuff that is likely to exist when you run a server locally
 /*.db
@@ -33,9 +29,6 @@ __pycache__/
 /media_store/
 /uploads
 
-# For direnv users
-/.envrc
-
 # IDEs
 /.idea/
 /.ropeproject/
@@ -46,17 +39,9 @@ __pycache__/
 /.coverage*
 /.mypy_cache/
 /.tox
-/.tox-pg-container
 /build/
 /coverage.*
 /dist/
 /docs/build/
 /htmlcov
 /pip-wheel-metadata/
-
-# docs
-book/
-
-# complement
-/complement-*
-/master.tar.gz
7898 CHANGES.md
File diff suppressed because it is too large
291 CONTRIBUTING.md
@@ -1,3 +1,290 @@
-# Welcome to Synapse
-
-Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.

# Contributing code to Synapse

Everyone is welcome to contribute code to [matrix.org
projects](https://github.com/matrix-org), provided that they are willing to
license their contributions under the same license as the project itself. We
follow a simple 'inbound=outbound' model for contributions: the act of
submitting an 'inbound' contribution means that the contributor agrees to
license the code under the same terms as the project's overall 'outbound'
license - in our case, this is almost always Apache Software License v2 (see
[LICENSE](LICENSE)).

## How to contribute

The preferred and easiest way to contribute changes is to fork the relevant
project on github, and then [create a pull request](
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
changes into our repo.

Some other points to follow:

 * Please base your changes on the `develop` branch.

 * Please follow the [code style requirements](#code-style).

 * Please include a [changelog entry](#changelog) with each PR.

 * Please [sign off](#sign-off) your contribution.

 * Please keep an eye on the pull request for feedback from the [continuous
   integration system](#continuous-integration-and-testing) and try to fix any
   errors that come up.

 * If you need to [update your PR](#updating-your-pull-request), just add new
   commits to your branch rather than rebasing.

## Code style

Synapse's code style is documented [here](docs/code_style.md). Please follow
it, including the conventions for the [sample configuration
file](docs/code_style.md#configuration-file-format).

Many of the conventions are enforced by scripts which are run as part of the
[continuous integration system](#continuous-integration-and-testing). To help
check if you have followed the code style, you can run `scripts-dev/lint.sh`
locally. You'll need python 3.6 or later, and to install a number of tools:

```
# Install the dependencies
pip install -e ".[lint,mypy]"

# Run the linter script
./scripts-dev/lint.sh
```

**Note that the script does not just test/check, but also reformats code, so you
may wish to ensure any new code is committed first**.

By default, this script checks all files and can take some time; if you alter
only certain files, you might wish to specify paths as arguments to reduce the
run-time:

```
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```

You can also provide the `-d` option, which will lint the files that have been
changed since the last git commit. This will often be significantly faster than
linting the whole codebase.
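For example, a quick pre-push check over just your latest edits might look like:

```sh
# Lint only the files changed since the last commit
./scripts-dev/lint.sh -d
```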
Before pushing new changes, ensure they don't produce linting errors. Commit any
files that were corrected.

Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.

## Changelog

All changes, even minor ones, need a corresponding changelog / newsfragment
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).

To create a changelog entry, make a new file in the `changelog.d` directory named
in the format of `PRnumber.type`. The type can be one of the following:

 * `feature`
 * `bugfix`
 * `docker` (for updates to the Docker image)
 * `doc` (for updates to the documentation)
 * `removal` (also used for deprecations)
 * `misc` (for internal-only changes)

This file will become part of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
release, so the content of the file should be a short description of your
change in the same style as the rest of the changelog. The file can contain Markdown
formatting, and should end with a full stop (.) or an exclamation mark (!) for
consistency.

Adding credits to the changelog is encouraged; we value your
contributions and would like to have you shouted out in the release notes!

For example, a fix in PR #1234 would have its changelog entry in
`changelog.d/1234.bugfix`, and contain content like:

> The security levels of Florbs are now validated when received
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.

If there are multiple pull requests involved in a single bugfix/feature/etc,
then the content for each `changelog.d` file should be the same. Towncrier will
merge the matching files together into a single changelog entry when we come to
release.

### How do I know what to call the changelog file before I create the PR?

Obviously, you don't know if you should call your newsfile
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
chicken-and-egg problem.

There are two options for solving this:

 1. Open the PR without a changelog file, see what number you got, and *then*
    add the changelog file to your branch (see [Updating your pull
    request](#updating-your-pull-request)), or:

 1. Look at the [list of all
    issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
    highest number you see, and quickly open the PR before somebody else claims
    your number.

[This
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
might be helpful if you find yourself doing this a lot.

Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
to put together a release!

### Debian changelog

Changes which affect the debian packaging files (in `debian`) are an
exception to the rule that all changes require a `changelog.d` file.

In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command:

```
dch
```

This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name are
corrected for the release.)

If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)

## Documentation

There is a growing amount of documentation located in the [docs](docs)
directory. This documentation is intended primarily for sysadmins running their
own Synapse instance, as well as developers interacting externally with
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
regarding Synapse's Admin API, which is used mostly by sysadmins and external
service developers.

New files added to both folders should be written in [Github-Flavoured
Markdown](https://guides.github.com/features/mastering-markdown/), and attempts
should be made to migrate existing documents to markdown where possible.

Some documentation also exists in [Synapse's Github
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.

## Sign off

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
[submitting patches process](
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:

```
Signed-off-by: Your Name <your@email.example.org>
```

We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.

Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
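For example (the name, email address, and commit message here are placeholders):

```sh
git config user.name "Your Name"
git config user.email "your@email.example.org"

# -s appends the Signed-off-by line to the commit message
git commit -s -m "Validate florb security levels"
```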
## Continuous integration and testing

[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
run a series of checks and tests against any PR which is opened against the
project; if your change breaks the build, this will be shown in GitHub, with
links to the build results. If your build fails, please try to fix the errors
and update your branch.

To run unit tests in a local development environment, you can use:

- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
  for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
  (requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
  (requires Docker). Entirely self-contained, recommended if you don't want to
  set up PostgreSQL yourself.

Docker images are available for running the integration tests (SyTest) locally,
see the [documentation in the SyTest repo](
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
information.

## Updating your pull request

If you decide to make changes to your pull request - perhaps to address issues
raised in a review, or to fix problems highlighted by [continuous
integration](#continuous-integration-and-testing) - just add new commits to your
branch, and push to GitHub. The pull request will automatically be updated.
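Concretely, that workflow is just the following (the file path and commit message are illustrative):

```sh
git add changelog.d/1234.bugfix        # stage your follow-up changes
git commit -m "Address review comments"
git push                               # the open PR picks this up automatically
```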
Please **avoid** rebasing your branch, especially once the PR has been
reviewed: doing so makes it very difficult for a reviewer to see what has
changed since a previous review.

## Notes for maintainers on merging PRs etc

There are some notes for those with commit access to the project on how we
manage git [here](docs/dev/git.md).

## Conclusion

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!
597 INSTALL.md
@@ -1,7 +1,596 @@
# Installation Instructions
|
# Installation Instructions
|
||||||
|
|
||||||
This document has moved to the
|
There are 3 steps to follow under **Installation Instructions**.
|
||||||
[Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
|
||||||
Please update your links.
|
|
||||||
|
|
||||||
The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
|
- [Installation Instructions](#installation-instructions)
|
||||||
|
- [Choosing your server name](#choosing-your-server-name)
|
||||||
|
- [Installing Synapse](#installing-synapse)
|
||||||
|
- [Installing from source](#installing-from-source)
|
||||||
|
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||||
|
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
||||||
|
- [ArchLinux](#archlinux)
|
||||||
|
- [CentOS/Fedora](#centosfedora)
|
||||||
|
- [macOS](#macos)
|
||||||
|
- [OpenSUSE](#opensuse)
|
||||||
|
- [OpenBSD](#openbsd)
|
||||||
|
- [Windows](#windows)
|
||||||
|
- [Prebuilt packages](#prebuilt-packages)
|
||||||
|
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
||||||
|
- [Debian/Ubuntu](#debianubuntu)
|
||||||
|
- [Matrix.org packages](#matrixorg-packages)
|
||||||
|
- [Downstream Debian packages](#downstream-debian-packages)
|
||||||
|
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
||||||
|
- [Fedora](#fedora)
|
||||||
|
- [OpenSUSE](#opensuse-1)
|
||||||
|
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
||||||
|
- [ArchLinux](#archlinux-1)
|
||||||
|
- [Void Linux](#void-linux)
|
||||||
|
- [FreeBSD](#freebsd)
|
||||||
|
- [OpenBSD](#openbsd-1)
|
||||||
|
- [NixOS](#nixos)
|
||||||
|
- [Setting up Synapse](#setting-up-synapse)
|
||||||
|
- [Using PostgreSQL](#using-postgresql)
|
||||||
|
- [TLS certificates](#tls-certificates)
|
||||||
|
- [Client Well-Known URI](#client-well-known-uri)
|
||||||
|
- [Email](#email)
|
||||||
|
- [Registering a user](#registering-a-user)
|
||||||
|
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||||
|
- [URL previews](#url-previews)
|
||||||
|
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||||
|
|
||||||
|
## Choosing your server name
|
||||||
|
|
||||||
|
It is important to choose the name for your server before you install Synapse,
|
||||||
|
because it cannot be changed later.
|
||||||
|
|
||||||
|
The server name determines the "domain" part of user-ids for users on your
|
||||||
|
server: these will all be of the format `@user:my.domain.name`. It also
|
||||||
|
determines how other matrix servers will reach yours for federation.
|
||||||
|
|
||||||
|
For a test configuration, set this to the hostname of your server. For a more
|
||||||
|
production-ready setup, you will probably want to specify your domain
|
||||||
|
(`example.com`) rather than a matrix-specific hostname here (in the same way
|
||||||
|
that your email address is probably `user@example.com` rather than
|
||||||
|
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||||
|
[Setting up Federation](docs/federate.md).
|
||||||
|
|
||||||
|
## Installing Synapse
|
||||||
|
|
||||||
|
### Installing from source
|
||||||
|
|
||||||
|
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||||
|
|
||||||
|
System requirements:
|
||||||
|
|
||||||
|
- POSIX-compliant system (tested on Linux & OS X)
|
||||||
|
- Python 3.5.2 or later, up to Python 3.9.
|
||||||
|
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||||
|
|
||||||
|
Synapse is written in Python but some of the libraries it uses are written in
|
||||||
|
C. So before we can install Synapse itself we need a working C compiler and the
|
||||||
|
header files for Python C extensions. See [Platform-Specific
|
||||||
|
Instructions](#platform-specific-instructions) for information on installing
|
||||||
|
these on various platforms.
|
||||||
|
|
||||||
|
To install the Synapse homeserver run:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
mkdir -p ~/synapse
|
||||||
|
virtualenv -p python3 ~/synapse/env
|
||||||
|
source ~/synapse/env/bin/activate
|
||||||
|
pip install --upgrade pip
|
||||||
|
pip install --upgrade setuptools
|
||||||
|
pip install matrix-synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
|
||||||
|
and install it, along with the python libraries it uses, into a virtual environment
|
||||||
|
under `~/synapse/env`. Feel free to pick a different directory if you
|
||||||
|
prefer.
|
||||||
|
|
||||||
|
This Synapse installation can then be later upgraded by using pip again with the
|
||||||
|
update flag:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ~/synapse/env/bin/activate
|
||||||
|
pip install -U matrix-synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
Before you can start Synapse, you will need to generate a configuration
|
||||||
|
file. To do this, run (in your virtualenv, as before):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd ~/synapse
|
||||||
|
python -m synapse.app.homeserver \
|
||||||
|
--server-name my.domain.name \
|
||||||
|
--config-path homeserver.yaml \
|
||||||
|
--generate-config \
|
||||||
|
--report-stats=[yes|no]
|
||||||
|
```
|
||||||
|
|
||||||
|
... substituting an appropriate value for `--server-name`.
|
||||||
|
|
||||||
|
This command will generate you a config file that you can then customise, but it will
|
||||||
|
also generate a set of keys for you. These keys will allow your homeserver to
|
||||||
|
identify itself to other homeserver, so don't lose or delete them. It would be
|
||||||
|
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||||
|
change your homeserver's keys, you may find that other homeserver have the
|
||||||
|
old key cached. If you update the signing key, you should change the name of the
|
||||||
|
key in the `<server name>.signing.key` file (the second word) to something
|
||||||
|
different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
|
||||||
|
|
||||||
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
|
run (e.g. `~/synapse`), and:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd ~/synapse
|
||||||
|
source env/bin/activate
|
||||||
|
synctl start
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Platform-Specific Instructions
|
||||||
|
|
||||||
|
##### Debian/Ubuntu/Raspbian
|
||||||
|
|
||||||
|
Installing prerequisites on Ubuntu or Debian:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt install build-essential python3-dev libffi-dev \
|
||||||
|
python3-pip python3-setuptools sqlite3 \
|
||||||
|
libssl-dev virtualenv libjpeg-dev libxslt1-dev
|
||||||
|
```
|
||||||
|
|
||||||
|
##### ArchLinux
|
||||||
|
|
||||||
|
Installing prerequisites on ArchLinux:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo pacman -S base-devel python python-pip \
|
||||||
|
python-setuptools python-virtualenv sqlite3
|
||||||
|
```
|
||||||
|
|
||||||
|
##### CentOS/Fedora
|
||||||
|
|
||||||
|
Installing prerequisites on CentOS 8 or Fedora>26:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
|
libwebp-devel tk-devel redhat-rpm-config \
|
||||||
|
python3-virtualenv libffi-devel openssl-devel
|
||||||
|
sudo dnf groupinstall "Development Tools"
|
||||||
|
```
|
||||||
|
|
||||||
|
Installing prerequisites on CentOS 7 or Fedora<=25:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
|
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||||
|
python3-virtualenv libffi-devel openssl-devel
|
||||||
|
sudo yum groupinstall "Development Tools"
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
|
||||||
|
uses SQLite 3.7. You may be able to work around this by installing a more
|
||||||
|
recent SQLite version, but it is recommended that you instead use a Postgres
|
||||||
|
database: see [docs/postgres.md](docs/postgres.md).
|
||||||
|
|
||||||
|
##### macOS
|
||||||
|
|
||||||
|
Installing prerequisites on macOS:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
xcode-select --install
|
||||||
|
sudo easy_install pip
|
||||||
|
sudo pip install virtualenv
|
||||||
|
brew install pkg-config libffi
|
||||||
|
```
|
||||||
|
|
||||||
|
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
||||||
|
via brew and inform `pip` about it so that `psycopg2` builds:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
brew install openssl@1.1
|
||||||
|
export LDFLAGS="-L/usr/local/opt/openssl/lib"
|
||||||
|
export CPPFLAGS="-I/usr/local/opt/openssl/include"
|
||||||
|
```
|
||||||
|
|
||||||
|
##### OpenSUSE
|
||||||
|
|
||||||
|
Installing prerequisites on openSUSE:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo zypper in -t pattern devel_basis
|
||||||
|
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||||
|
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||||
|
```
|
||||||
|
|
||||||
|
##### OpenBSD
|
||||||
|
|
||||||
|
A port of Synapse is available under `net/synapse`. The filesystem
|
||||||
|
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||||
|
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
||||||
|
and mounting it to `/var/synapse` should be taken into consideration.
|
||||||
|
|
||||||
|
To be able to build Synapse's dependency on python the `WRKOBJDIR`
|
||||||
|
(cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
|
||||||
|
mounted with `wxallowed` (cf. `mount(8)`).
|
||||||
|
|
||||||
|
Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
|
||||||
|
default OpenBSD installation is mounted with `wxallowed`):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
doas mkdir /usr/local/pobj_wxallowed
|
||||||
|
```
|
||||||
|
|
||||||
|
Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
|
||||||
|
configured in `/etc/mk.conf`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
|
||||||
|
```
|
||||||
|
|
||||||
|
Setting the `WRKOBJDIR` for building python:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
Building Synapse:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd /usr/ports/net/synapse
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Windows
|
||||||
|
|
||||||
|
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||||
|
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||||
|
Debian, Fedora, or source installation methods. More information about WSL can
|
||||||
|
be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
|
||||||
|
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
|
||||||
|
for Windows Server.
|
||||||
|
|
||||||
|
### Prebuilt packages
|
||||||
|
|
||||||
|
As an alternative to installing from source, prebuilt packages are available
|
||||||
|
for a number of platforms.
|
||||||
|
|
||||||
|
#### Docker images and Ansible playbooks
|
||||||
|
|
||||||
|
There is an official synapse image available at
|
||||||
|
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
||||||
|
the docker-compose file available at [contrib/docker](contrib/docker). Further
|
||||||
|
information on this including configuration options is available in the README
|
||||||
|
on hub.docker.com.
|
||||||
|
|
||||||
|
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||||
|
Dockerfile to automate a synapse server in a single Docker image, at
|
||||||
|
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
||||||
|
|
||||||
|
Slavi Pantaleev has created an Ansible playbook,
|
||||||
|
which installs the offical Docker image of Matrix Synapse
|
||||||
|
along with many other Matrix-related services (Postgres database, Element, coturn,
|
||||||
|
ma1sd, SSL support, etc.).
|
||||||
|
For more details, see
|
||||||
|
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
||||||
|
|
||||||
|
#### Debian/Ubuntu
|
||||||
|
|
||||||
|
##### Matrix.org packages
|
||||||
|
|
||||||
|
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||||
|
Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
|
||||||
|
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt install -y lsb-release wget apt-transport-https
|
||||||
|
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||||
|
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||||
|
sudo tee /etc/apt/sources.list.d/matrix-org.list
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install matrix-synapse-py3
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: if you followed a previous version of these instructions which
|
||||||
|
recommended using `apt-key add` to add an old key from
|
||||||
|
`https://matrix.org/packages/debian/`, you should note that this key has been
|
||||||
|
revoked. You should remove the old key with `sudo apt-key remove
|
||||||
|
C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
|
||||||
|
update your configuration.
|
||||||
|
|
||||||
|
The fingerprint of the repository signing key (as shown by `gpg
|
||||||
|
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||||
|
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||||
|
|
||||||
|
##### Downstream Debian packages
|
||||||
|
|
||||||
|
We do not recommend using the packages from the default Debian `buster`
|
||||||
|
repository at this time, as they are old and suffer from known security
|
||||||
|
vulnerabilities. You can install the latest version of Synapse from
|
||||||
|
[our repository](#matrixorg-packages) or from `buster-backports`. Please
|
||||||
|
see the [Debian documentation](https://backports.debian.org/Instructions/)
|
||||||
|
for information on how to use backports.
|
||||||
|
|
||||||
|
If you are using Debian `sid` or testing, Synapse is available in the default
|
||||||
|
repositories and it should be possible to install it simply with:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt install matrix-synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Downstream Ubuntu packages
|
||||||
|
|
||||||
|
We do not recommend using the packages in the default Ubuntu repository
|
||||||
|
at this time, as they are old and suffer from known security vulnerabilities.
|
||||||
|
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
|
||||||
|
|
||||||
|
#### Fedora
|
||||||
|
|
||||||
|
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo dnf install matrix-synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
Oleg Girko provides Fedora RPMs at
|
||||||
|
<https://obs.infoserver.lv/project/monitor/matrix-synapse>
|
||||||
|
|
||||||
|
#### OpenSUSE
|
||||||
|
|
||||||
|
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo zypper install matrix-synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SUSE Linux Enterprise Server
|
||||||
|
|
||||||
|
Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||||
|
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
|
||||||
|
|
||||||
|
#### ArchLinux
|
||||||
|
|
||||||
|
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||||
|
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
||||||
|
the necessary dependencies.
|
||||||
|
|
||||||
|
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo pip install --upgrade pip
|
||||||
|
```
|
||||||
|
|
||||||
|
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
|
||||||
|
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||||
|
compile it under the right architecture. (This should not be needed if
|
||||||
|
installing under virtualenv):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo pip uninstall py-bcrypt
|
||||||
|
sudo pip install py-bcrypt
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Void Linux
|
||||||
|
|
||||||
|
Synapse can be found in the void repositories as 'synapse':
|
||||||
|
|
||||||
|
```sh
|
||||||
|
xbps-install -Su
|
||||||
|
xbps-install -S synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
#### FreeBSD
|
||||||
|
|
||||||
|
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||||
|
|
||||||
|
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||||
|
- Packages: `pkg install py37-matrix-synapse`
|
||||||
|
|
||||||
|
#### OpenBSD
|
||||||
|
|
||||||
|
As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
|
||||||
|
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||||
|
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
||||||
|
and mounting it to `/var/synapse` should be taken into consideration.
|
||||||
|
|
||||||
|
Installing Synapse:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
doas pkg_add synapse
|
||||||
|
```
|
||||||
|
|
||||||
|
#### NixOS
|
||||||
|
|
||||||
|
Robin Lambertz has packaged Synapse for NixOS at:
|
||||||
|
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
|
||||||
|
|
||||||
|
## Setting up Synapse
|
||||||
|
|
||||||
|
Once you have installed synapse as above, you will need to configure it.
|
||||||
|
|
||||||
|
### Using PostgreSQL
|
||||||
|
|
||||||
|
By default Synapse uses [SQLite](https://sqlite.org/) and in doing so trades performance for convenience.
|
||||||
|
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||||
|
very light workloads.
|
||||||
|
|
||||||
|
Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org). Advantages include:
|
||||||
|
|
||||||
|
- significant performance improvements due to the superior threading and
|
||||||
|
caching model, smarter query optimiser
|
||||||
|
- allowing the DB to be run on separate hardware
|
||||||
|
|
||||||
|
For information on how to install and use PostgreSQL in Synapse, please see
|
||||||
|
[docs/postgres.md](docs/postgres.md)
|
||||||
|
|
||||||
|
### TLS certificates
|
||||||
|
|
||||||
|
The default configuration exposes a single HTTP port on the local
|
||||||
|
interface: `http://localhost:8008`. It is suitable for local testing,
|
||||||
|
but for any practical use, you will need Synapse's APIs to be served
|
||||||
|
over HTTPS.
|
||||||
|
|
||||||
|
The recommended way to do so is to set up a reverse proxy on port
|
||||||
|
`8448`. You can find documentation on doing so in
|
||||||
|
[docs/reverse_proxy.md](docs/reverse_proxy.md).
|
||||||
|
|
||||||
|
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
||||||
|
so, you will need to edit `homeserver.yaml`, as follows:
|
||||||
|
|
||||||
|
- First, under the `listeners` section, uncomment the configuration for the
|
||||||
|
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||||
|
each line). The relevant lines are like this:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- port: 8448
|
||||||
|
type: http
|
||||||
|
tls: true
|
||||||
|
resources:
|
||||||
|
- names: [client, federation]
|
||||||
|
```
|
||||||
|
|
||||||
|
- You will also need to uncomment the `tls_certificate_path` and
|
||||||
|
`tls_private_key_path` lines under the `TLS` section. You will need to manage
|
||||||
|
provisioning of these certificates yourself — Synapse had built-in ACME
|
||||||
|
support, but the ACMEv1 protocol Synapse implements is deprecated, not
|
||||||
|
allowed by LetsEncrypt for new sites, and will break for existing sites in
|
||||||
|
late 2020. See [ACME.md](docs/ACME.md).
|
||||||
|
|
||||||
|
If you are using your own certificate, be sure to use a `.pem` file that
|
||||||
|
includes the full certificate chain including any intermediate certificates
|
||||||
|
(for instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||||
|
`cert.pem`).
|
||||||
|
|
||||||
|
For a more detailed guide to configuring your server for federation, see
|
||||||
|
[federate.md](docs/federate.md).
|
### Client Well-Known URI

Setting up the client Well-Known URI is optional, but if you set it up, it will
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
which support well-known lookup to automatically configure the homeserver and
identity server URLs. This is useful so that users don't have to memorize or think
about the actual homeserver URL you are using.

The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
the following format.

```json
{
  "m.homeserver": {
    "base_url": "https://<matrix.example.com>"
  }
}
```

It can optionally contain identity server information as well.

```json
{
  "m.homeserver": {
    "base_url": "https://<matrix.example.com>"
  },
  "m.identity_server": {
    "base_url": "https://<identity.example.com>"
  }
}
```

To work in browser-based clients, the file must be served with the appropriate
Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
`Access-Control-Allow-Origin: *`, which would allow all browser-based clients to
view it.

In nginx this would be something like:

```nginx
location /.well-known/matrix/client {
    return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
    default_type application/json;
    add_header Access-Control-Allow-Origin *;
}
```
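Once deployed, you can sanity-check both the JSON body and the CORS header with
`curl` (the hostname is a placeholder):

```sh
curl -i https://<server_name>/.well-known/matrix/client
```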
You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
correctly. `public_baseurl` should be set to the URL that clients will use to
connect to your server. This is the same URL you put for the `m.homeserver`
`base_url` above.

```yaml
public_baseurl: "https://<matrix.example.com>"
```
### Email

It is desirable for Synapse to have the capability to send email. This allows
Synapse to send password reset emails, send verifications when an email address
is added to a user's account, and send email notifications to users when they
receive new messages.

To configure an SMTP server for Synapse, modify the configuration section
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
and `notif_from` fields filled out. You may also need to set `smtp_user`,
`smtp_pass`, and `require_transport_security`.

If email is not configured, password reset, registration and notifications via
email will be disabled.
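As a minimal sketch of such a section (hostname, credentials and addresses are
placeholders for your own mail setup):

```yaml
email:
  smtp_host: mail.example.com
  smtp_port: 587
  smtp_user: "synapse@example.com"
  smtp_pass: "secretpassword"
  require_transport_security: true
  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
```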
### Registering a user

The easiest way to create a new user is to do so from a client like [Element](https://element.io/).

Alternatively you can do so from the command line if you have installed via pip.

This can be done as follows:

```sh
$ source ~/synapse/env/bin/activate
$ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
New user localpart: erikj
Password:
Confirm password:
Make admin [no]:
Success!
```

This process uses a setting `registration_shared_secret` in
`homeserver.yaml`, which is shared between Synapse itself and the
`register_new_matrix_user` script. It doesn't matter what it is (a random
value is generated by `--generate-config`), but it should be kept secret, as
anyone with knowledge of it can register users, including admin accounts,
on your server even if `enable_registration` is `false`.
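In `homeserver.yaml` this is a single setting; the value below is a placeholder
(use the random one that `--generate-config` created for you):

```yaml
registration_shared_secret: "<PRIVATE, RANDOM STRING>"
```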
### Setting up a TURN server

For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
### URL previews

Synapse includes support for previewing URLs, which is disabled by default. To
turn it on you must enable the `url_preview_enabled: True` config parameter
and explicitly specify the IP ranges that Synapse is not allowed to spider for
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
This is critical from a security perspective to stop arbitrary Matrix users
spidering 'internal' URLs on your network. At the very least we recommend that
your loopback and RFC1918 IP addresses are blacklisted.
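For instance, a sketch covering just the loopback and RFC1918 ranges mentioned
above (a real deployment may well want to blacklist more ranges; see the sample
config for the full recommended list):

```yaml
url_preview_enabled: true
url_preview_ip_range_blacklist:
  - '127.0.0.0/8'
  - '10.0.0.0/8'
  - '172.16.0.0/12'
  - '192.168.0.0/16'
```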
This also requires the optional `lxml` python dependency to be installed. This
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
means `apt-get install libxml2-dev`, or equivalent for your OS.
### Troubleshooting Installation

`pip` seems to leak *lots* of memory during installation. For instance, a Linux
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
happens, you will have to individually install the dependencies which are
failing, e.g.:

```sh
pip install twisted
```

If you have any other problems, feel free to ask in
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
53
MANIFEST.in
Normal file
@@ -0,0 +1,53 @@
include synctl
include LICENSE
include VERSION
include *.rst
include *.md
include demo/README
include demo/demo.tls.dh
include demo/*.py
include demo/*.sh

recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md

recursive-include docs *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
include tests/http/ca.crt
include tests/http/ca.key
include tests/http/server.key

recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js

exclude .codecov.yml
exclude .coveragerc
exclude .dockerignore
exclude .editorconfig
exclude Dockerfile
exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh

include pyproject.toml
recursive-include changelog.d *

prune .buildkite
prune .circleci
prune .github
prune contrib
prune debian
prune demo/etc
prune docker
prune snap
prune stubs
205
README.rst
@@ -1,6 +1,6 @@
-=========================================================================
-Synapse |support| |development| |documentation| |license| |pypi| |python|
-=========================================================================
+=========================================================
+Synapse |support| |development| |license| |pypi| |python|
+=========================================================
 
 .. contents::
 
@@ -25,7 +25,7 @@ The overall architecture is::
 
 ``#matrix:matrix.org`` is the official support room for Matrix, and can be
 accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
-via IRC bridge at irc://irc.libera.chat/matrix.
+via IRC bridge at irc://irc.freenode.net/matrix.
 
 Synapse is currently in rapid development, but as of version 0.5 we believe it
 is sufficiently stable to be run as an internet-facing service for real usage!
@@ -55,8 +55,11 @@ solutions. The hope is for Matrix to act as the building blocks for a new
 generation of fully open and interoperable messaging and VoIP apps for the
 internet.
 
-Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
-team, written in Python 3/Twisted.
+Synapse is a reference "homeserver" implementation of Matrix from the core
+development team at matrix.org, written in Python/Twisted. It is intended to
+showcase the concept of Matrix and let folks see the spec in the context of a
+codebase and let you run your own homeserver and generally help bootstrap the
+ecosystem.
 
 In Matrix, every user runs one or more Matrix clients, which connect through to
 a Matrix homeserver. The homeserver stores all their personal chat history and
@@ -82,22 +85,16 @@ For support installing or managing Synapse, please join |room|_ (from a matrix.o
 account if necessary) and ask questions there. We do not use GitHub issues for
 support requests, only for bug reports and feature requests.
 
-Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
-with its source available in |docs|_.
-
 .. |room| replace:: ``#synapse:matrix.org``
 .. _room: https://matrix.to/#/#synapse:matrix.org
 
-.. |docs| replace:: ``docs``
-.. _docs: docs
-
 Synapse Installation
 ====================
 
 .. _federation:
 
-* For details on how to install synapse, see
-  `Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
+* For details on how to install synapse, see `<INSTALL.md>`_.
 * For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
 
 
@@ -109,8 +106,7 @@ from a web client.
 
 Unless you are running a test instance of Synapse on your local machine, in
 general, you will need to enable TLS support before you can successfully
-connect from a client: see
-`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
+connect from a client: see `<INSTALL.md#tls-certificates>`_.
 
 An easy way to get started is to login or register via Element at
 https://app.element.io/#/login or https://app.element.io/#/register respectively.
@@ -146,55 +142,38 @@ the form of::
 As when logging in, you will need to specify a "Custom server". Specify your
 desired ``localpart`` in the 'User name' box.
 
-Security note
-=============
-
-Matrix serves raw, user-supplied data in some APIs -- specifically the `content
-repository endpoints`_.
-
-.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
-
-Whilst we make a reasonable effort to mitigate against XSS attacks (for
-instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
-domain hosting other web applications. This especially applies to sharing
-the domain with Matrix web clients and other sensitive applications like
-webmail. See
-https://developer.github.com/changes/2014-04-25-user-content-security for more
-information.
-
-.. _CSP: https://github.com/matrix-org/synapse/pull/1021
-
-Ideally, the homeserver should not simply be on a different subdomain, but on
-a completely different `registered domain`_ (also known as top-level site or
-eTLD+1). This is because `some attacks`_ are still possible as long as the two
-applications share the same registered domain.
-
-.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
-
-.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
-
-To illustrate this with an example, if your Element Web or other sensitive web
-application is hosted on ``A.example1.com``, you should ideally host Synapse on
-``example2.com``. Some amount of protection is offered by hosting on
-``B.example1.com`` instead, so this is also acceptable in some scenarios.
-However, you should *not* host your Synapse on ``A.example1.com``.
-
-Note that all of the above refers exclusively to the domain used in Synapse's
-``public_baseurl`` setting. In particular, it has no bearing on the domain
-mentioned in MXIDs hosted on that server.
-
-Following this advice ensures that even if an XSS is found in Synapse, the
-impact to other applications will be minimal.
+ACME setup
+==========
+
+For details on having Synapse manage your federation TLS certificates
+automatically, please see `<docs/ACME.md>`_.
+
+
+Security Note
+=============
+
+Matrix serves raw user generated data in some APIs - specifically the `content
+repository endpoints <https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid>`_.
+
+Whilst we have tried to mitigate against possible XSS attacks (e.g.
+https://github.com/matrix-org/synapse/pull/1021) we recommend running
+matrix homeservers on a dedicated domain name, to limit any malicious user generated
+content served to web browsers a matrix API from being able to attack webapps hosted
+on the same domain. This is particularly true of sharing a matrix webclient and
+server on the same domain.
+
+See https://github.com/vector-im/riot-web/issues/1977 and
+https://developer.github.com/changes/2014-04-25-user-content-security for more details.
@@ -204,9 +183,8 @@ Using a reverse proxy with Synapse
 It is recommended to put a reverse proxy such as
 `nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
 `Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
-`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
-`HAProxy <https://www.haproxy.org/>`_ or
-`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
+`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
+`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
 doing so is that it means that you can expose the default https port (443) to
 Matrix clients without needing to run Synapse with root privileges.
 
@@ -246,7 +224,7 @@ Password reset
 ==============
 
 Users can reset their password through their client. Alternatively, a server admin
-can reset a users password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
+can reset a users password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
 or by directly editing the database as shown below.
 
 First calculate the hash of the new password::
@@ -265,27 +243,11 @@ Then update the ``users`` table in the database::
 Synapse Development
 ===================
 
-The best place to get started is our
-`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
-This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
-information for synapse developers as well as synapse administrators.
-
-Developers might be particularly interested in:
-
-* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
-* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
-* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.
-
-Alongside all that, join our developer community on Matrix:
-`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
-
-
-Quick start
------------
+Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
 
 Before setting up a development environment for synapse, make sure you have the
 system dependencies (such as the python header files) installed - see
-`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
+`Installing from source <INSTALL.md#installing-from-source>`_.
 
 To check out a synapse for development, clone the git repo into a working
 directory of your choice::
@@ -293,51 +255,23 @@ directory of your choice::
 
     git clone https://github.com/matrix-org/synapse.git
     cd synapse
 
-Synapse has a number of external dependencies. We maintain a fixed development
-environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
-
-    pip install --user pipx
-    pipx install poetry
-
-as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
-(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
-for other installation methods.) Then ask poetry to create a virtual environment
-from the project and install Synapse's dependencies::
-
-    poetry install --extras "all test"
+Synapse has a number of external dependencies, that are easiest
+to install using pip and a virtualenv::
+
+    python3 -m venv ./env
+    source ./env/bin/activate
+    pip install -e ".[all,test]"
 
 This will run a process of downloading and installing all the needed
-dependencies into a virtual env.
-
-We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
-
-    poetry run ./demo/start.sh
-
-(to stop, you can use ``poetry run ./demo/stop.sh``)
-
-See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
-for more information.
-
-If you just want to start a single instance of the app and run it directly::
-
-    # Create the homeserver.yaml config once
-    poetry run synapse_homeserver \
-      --server-name my.domain.name \
-      --config-path homeserver.yaml \
-      --generate-config \
-      --report-stats=[yes|no]
-
-    # Start the app
-    poetry run synapse_homeserver --config-path homeserver.yaml
-
-Running the unit tests
-----------------------
-
-After getting up and running, you may wish to run Synapse's unit tests to
+dependencies into a virtual env. If any dependencies fail to install,
+try installing the failing modules individually::
+
+    pip install -e "module-name"
+
+Once this is done, you may wish to run Synapse's unit tests to
 check that everything is installed correctly::
 
-    poetry run trial tests
+    python -m twisted.trial tests
 
 This should end with a 'PASSED' result (note that exact numbers will
 differ)::
@@ -346,12 +280,29 @@ differ)::
 
     PASSED (skips=15, successes=1322)
 
-For more tips on running the unit tests, like running a specific test or
-to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.
+We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
+
+    ./demo/start.sh
+
+(to stop, you can use `./demo/stop.sh`)
+
+If you just want to start a single instance of the app and run it directly::
+
+    # Create the homeserver.yaml config once
+    python -m synapse.app.homeserver \
+        --server-name my.domain.name \
+        --config-path homeserver.yaml \
+        --generate-config \
+        --report-stats=[yes|no]
+
+    # Start the app
+    python -m synapse.app.homeserver --config-path homeserver.yaml
+
 
 Running the Integration Tests
------------------------------
+=============================
 
 Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
 a Matrix homeserver integration testing suite, which uses HTTP requests to
@@ -359,17 +310,8 @@ access the API as a Matrix client would. It is able to run Synapse directly from
 the source tree, so installation of the server is not required.
 
 Testing with SyTest is recommended for verifying that changes related to the
-Client-Server API are functioning correctly. See the `SyTest installation
-instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
-
-Platform dependencies
-=====================
-
-Synapse uses a number of platform dependencies such as Python and PostgreSQL,
-and aims to follow supported upstream versions. See the
-`<docs/deprecation_policy.md>`_ document for more details.
-
+Client-Server API are functioning correctly. See the `installation instructions
+<https://github.com/matrix-org/sytest#installing>`_ for details.
 
 Troubleshooting
 ===============
@@ -441,12 +383,7 @@ massive excess of outgoing federation requests (see `discussion
 indicate that your server is also issuing far more outgoing federation
 requests than can be accounted for by your users' activity, this is a
 likely cause. The misbehavior can be worked around by setting
-the following in the Synapse config file:
-
-.. code-block:: yaml
-
-   presence:
-     enabled: false
+``use_presence: false`` in the Synapse config file.
 
 People can't accept room invitations from me
 --------------------------------------------
@@ -469,10 +406,6 @@ This is normally caused by a misconfiguration in your reverse-proxy. See
    :alt: (discuss development on #synapse-dev:matrix.org)
    :target: https://matrix.to/#/#synapse-dev:matrix.org
 
-.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
-   :alt: (Rendered documentation on GitHub Pages)
-   :target: https://matrix-org.github.io/synapse/latest/
-
 .. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
    :alt: (check license in LICENSE file)
    :target: LICENSE
1148
UPGRADE.rst
(File diff suppressed because it is too large.)
39
book.toml
@@ -1,39 +0,0 @@
# Documentation for possible options in this file is at
# https://rust-lang.github.io/mdBook/format/config.html
[book]
title = "Synapse"
authors = ["The Matrix.org Foundation C.I.C."]
language = "en"
multilingual = false

# The directory that documentation files are stored in
src = "docs"

[build]
# Prevent markdown pages from being automatically generated when they're
# linked to in SUMMARY.md
create-missing = false

[output.html]
# The URL visitors will be directed to when they try to edit a page
edit-url-template = "https://github.com/matrix-org/synapse/edit/develop/{path}"

# Remove the numbers that appear before each item in the sidebar, as they can
# get quite messy as we nest deeper
no-section-label = true

# The source code URL of the repository
git-repository-url = "https://github.com/matrix-org/synapse"

# The path that the docs are hosted on
site-url = "/synapse/"

# Additional HTML, JS, CSS that's injected into each page of the book.
# More information available in docs/website_files/README.md
additional-css = [
    "docs/website_files/table-of-contents.css",
    "docs/website_files/remove-nav-buttons.css",
    "docs/website_files/indent-section-headers.css",
]
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
1
changelog.d/9045.misc
Normal file
@@ -0,0 +1 @@
Add tests to `test_user.UsersListTestCase` for List Users Admin API.

1
changelog.d/9129.misc
Normal file
@@ -0,0 +1 @@
Various improvements to the federation client.

1
changelog.d/9135.doc
Normal file
@@ -0,0 +1 @@
Add link to Matrix VoIP tester for turn-howto.

1
changelog.d/9163.bugfix
Normal file
@@ -0,0 +1 @@
Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).

1
changelog.d/9176.misc
Normal file
@@ -0,0 +1 @@
Speed up chain cover calculation when persisting a batch of state events at once.

1
changelog.d/9180.misc
Normal file
@@ -0,0 +1 @@
Add a `long_description_type` to the package metadata.

1
changelog.d/9181.misc
Normal file
@@ -0,0 +1 @@
Speed up batch insertion when using PostgreSQL.

1
changelog.d/9184.misc
Normal file
@@ -0,0 +1 @@
Emit an error at startup if different Identity Providers are configured with the same `idp_id`.

1
changelog.d/9188.misc
Normal file
@@ -0,0 +1 @@
Speed up batch insertion when using PostgreSQL.

1
changelog.d/9189.misc
Normal file
@@ -0,0 +1 @@
Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration.

1
changelog.d/9190.misc
Normal file
@@ -0,0 +1 @@
Improve performance of concurrent use of `StreamIDGenerators`.

1
changelog.d/9191.misc
Normal file
@@ -0,0 +1 @@
Add some missing source directories to the automatic linting script.

1
changelog.d/9193.bugfix
Normal file
@@ -0,0 +1 @@
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.

1
changelog.d/9195.bugfix
Normal file
@@ -0,0 +1 @@
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
@@ -16,7 +16,6 @@
 
 """ Starts a synapse client console. """
 import argparse
-import binascii
 import cmd
 import getpass
 import json
@@ -25,10 +24,10 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
-from typing import Optional
 
+import nacl.encoding
+import nacl.signing
 import urlparse
-from signedjson.key import NACL_ED25519, decode_verify_key_bytes
 from signedjson.sign import SignatureVerifyException, verify_signed_json
 
 from twisted.internet import defer, reactor, threads
@@ -41,6 +40,7 @@ TRUSTED_ID_SERVERS = ["localhost:8001"]
 
 
 class SynapseCmd(cmd.Cmd):
+
     """Basic synapse command-line processor.
 
     This processes commands from the user and calls the relevant HTTP methods.
@@ -419,8 +419,8 @@ class SynapseCmd(cmd.Cmd):
         pubKey = None
         pubKeyObj = yield self.http_client.do_request("GET", url)
         if "public_key" in pubKeyObj:
-            pubKey = decode_verify_key_bytes(
-                NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
+            pubKey = nacl.signing.VerifyKey(
+                pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
             )
         else:
             print("No public key found in pubkey response!")
@@ -718,7 +718,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: Optional[dict] = None,
+        query_params={"access_token": None},
         alt_text=None,
     ):
         """ Runs an HTTP request and pretty prints the output.
@@ -729,8 +729,6 @@ class SynapseCmd(cmd.Cmd):
             data: Raw JSON data if any
             query_params: dict of query parameters to add to the url
         """
-        query_params = query_params or {"access_token": None}
-
         url = self._url() + path
         if "access_token" in query_params:
             query_params["access_token"] = self._tok()
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +16,6 @@
 import json
 import urllib
 from pprint import pformat
-from typing import Optional
 
 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -23,7 +23,8 @@ from twisted.web.http_headers import Headers
 
 
 class HttpClient:
-    """Interface for talking json over http"""
+    """ Interface for talking json over http
+    """
 
     def put_json(self, url, data):
         """ Sends the specifed json data using PUT
@@ -85,9 +86,9 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         defer.returnValue(json.loads(body))
 
-    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
-        """Wrapper of _create_request to issue a PUT request"""
-        headers_dict = headers_dict or {}
+    def _create_put_request(self, url, json_data, headers_dict={}):
+        """ Wrapper of _create_request to issue a PUT request
+        """
 
         if "Content-Type" not in headers_dict:
             raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
@@ -96,22 +97,15 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )
 
-    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
-        """Wrapper of _create_request to issue a GET request"""
-        return self._create_request("GET", url, headers_dict=headers_dict or {})
+    def _create_get_request(self, url, headers_dict={}):
+        """ Wrapper of _create_request to issue a GET request
+        """
+        return self._create_request("GET", url, headers_dict=headers_dict)
 
     @defer.inlineCallbacks
     def do_request(
-        self,
-        method,
-        url,
-        data=None,
-        qparams=None,
-        jsonreq=True,
-        headers: Optional[dict] = None,
+        self, method, url, data=None, qparams=None, jsonreq=True, headers={}
     ):
-        headers = headers or {}
-
         if qparams:
             url = "%s?%s" % (url, urllib.urlencode(qparams, True))
 
@@ -132,12 +126,9 @@ class TwistedHttpClient(HttpClient):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
-    def _create_request(
-        self, method, url, producer=None, headers_dict: Optional[dict] = None
-    ):
-        """Creates and sends a request to the given url"""
-        headers_dict = headers_dict or {}
-
+    def _create_request(self, method, url, producer=None, headers_dict={}):
+        """ Creates and sends a request to the given url
+        """
         headers_dict["User-Agent"] = ["Synapse Cmd Client"]
 
         retries_left = 5
@@ -194,7 +185,8 @@ class _RawProducer:
 
 
 class _JsonProducer:
-    """Used by the twisted http client to create the HTTP body from json"""
+    """ Used by the twisted http client to create the HTTP body from json
+    """
 
     def __init__(self, jsn):
         self.data = jsn
@@ -14,7 +14,6 @@ services:
     # failure
     restart: unless-stopped
     # See the readme for a full documentation of the environment settings
-    # NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
     environment:
       - SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
     volumes:
@@ -57,7 +56,7 @@ services:
       - POSTGRES_USER=synapse
       - POSTGRES_PASSWORD=changeme
       # ensure the database gets created correctly
-      # https://matrix-org.github.io/synapse/latest/postgres.html#set-up-database
+      # https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
       - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
     volumes:
       # You may store the database tables in a local folder..
@@ -1,125 +0,0 @@
# Setting up Synapse with Workers using Docker Compose

This directory describes how to deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).

Example worker configuration files can be found [here](workers).

All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.

An example Docker Compose file can be found [here](docker-compose.yaml).

## Worker Service Examples in Docker Compose

In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER`, or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`.

### Generic Worker Example

```yaml
synapse-generic-worker-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-generic-worker-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
  healthcheck:
    test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
    start_period: "5s"
    interval: "15s"
    timeout: "5s"
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.generic_worker
  # Expose port if required so your reverse proxy can send requests to this worker
  # Port configuration will depend on how the http listener is defined in the worker configuration file
  ports:
    - 8081:8081
  depends_on:
    - synapse
```

### Federation Sender Example

Please note: The federation sender does not receive REST API calls, so no exposed ports are required.

```yaml
synapse-federation-sender-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-federation-sender-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
  healthcheck:
    disable: true
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.federation_sender
  depends_on:
    - synapse
```

## `homeserver.yaml` Configuration

### Enable Redis

Locate the `redis` section of your `homeserver.yaml` and enable and configure it:

```yaml
redis:
  enabled: true
  host: redis
  port: 6379
  # password: <secret_password>
```

This assumes that your Redis service is called `redis` in your Docker Compose file.

### Add a replication Listener

Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:

```yaml
listeners:
  # Other listeners

  - port: 9093
    type: http
    resources:
      - names: [replication]
```

This listener is used by the workers for replication and is referred to in worker config files using the following settings:

```yaml
worker_replication_host: synapse
worker_replication_http_port: 9093
```

### Add Workers to `instance_map`

Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:

```yaml
instance_map:
  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034                     # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```

### Configure Federation Senders

This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:

```yaml
# This will disable federation sending on the main Synapse instance
send_federation: false

federation_sender_instances:
  - synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
```

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
@@ -1,77 +0,0 @@
networks:
  backend:

services:
  postgres:
    image: postgres:latest
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
    networks:
      - backend
    environment:
      POSTGRES_DB: synapse
      POSTGRES_USER: synapse_user
      POSTGRES_PASSWORD: postgres
      POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C

  redis:
    image: redis:latest
    restart: unless-stopped
    networks:
      - backend

  synapse:
    image: matrixdotorg/synapse:latest
    container_name: synapse
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/data:/data:rw
    ports:
      - 8008:8008
    networks:
      - backend
    environment:
      SYNAPSE_CONFIG_DIR: /data
      SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
    depends_on:
      - postgres

  synapse-generic-worker-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-generic-worker-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
    healthcheck:
      test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
      start_period: "5s"
      interval: "15s"
      timeout: "5s"
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.generic_worker
    # Expose port if required so your reverse proxy can send requests to this worker
    # Port configuration will depend on how the http listener is defined in the worker configuration file
    ports:
      - 8081:8081
    depends_on:
      - synapse

  synapse-federation-sender-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-federation-sender-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
    healthcheck:
      disable: true
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.federation_sender
    depends_on:
      - synapse
@@ -1,14 +0,0 @@
worker_app: synapse.app.federation_sender
worker_name: synapse-federation-sender-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

worker_log_config: /data/federation_sender.log.config
@@ -1,19 +0,0 @@
worker_app: synapse.app.generic_worker
worker_name: synapse-generic-worker-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true
    resources:
      - names: [client, federation]

worker_log_config: /data/worker.log.config
166
contrib/experiments/cursesio.py
Normal file
@@ -0,0 +1,166 @@
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import curses
import curses.wrapper
from curses.ascii import isprint

from twisted.internet import reactor


class CursesStdIO:
    def __init__(self, stdscr, callback=None):
        self.statusText = "Synapse test app -"
        self.searchText = ""
        self.stdscr = stdscr

        self.logLine = ""

        self.callback = callback

        self._setup()

    def _setup(self):
        self.stdscr.nodelay(1)  # Make non blocking

        self.rows, self.cols = self.stdscr.getmaxyx()
        self.lines = []

        curses.use_default_colors()

        self.paintStatus(self.statusText)
        self.stdscr.refresh()

    def set_callback(self, callback):
        self.callback = callback

    def fileno(self):
        """ We want to select on FD 0 """
        return 0

    def connectionLost(self, reason):
        self.close()

    def print_line(self, text):
        """ add a line to the internal list of lines"""

        self.lines.append(text)
        self.redraw()

    def print_log(self, text):
        self.logLine = text
        self.redraw()

    def redraw(self):
        """ method for redisplaying lines
            based on internal list of lines """

        self.stdscr.clear()
        self.paintStatus(self.statusText)
        i = 0
        index = len(self.lines) - 1
        while i < (self.rows - 3) and index >= 0:
            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
            i = i + 1
            index = index - 1

        self.printLogLine(self.logLine)

        self.stdscr.refresh()

    def paintStatus(self, text):
        if len(text) > self.cols:
            raise RuntimeError("TextTooLongError")

        self.stdscr.addstr(
            self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
        )

    def printLogLine(self, text):
        self.stdscr.addstr(
            0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
        )

    def doRead(self):
        """ Input is ready! """
        curses.noecho()
        c = self.stdscr.getch()  # read a character

        if c == curses.KEY_BACKSPACE:
            self.searchText = self.searchText[:-1]

        elif c == curses.KEY_ENTER or c == 10:
            text = self.searchText
            self.searchText = ""

            self.print_line(">> %s" % text)

            try:
                if self.callback:
                    self.callback.on_line(text)
            except Exception as e:
                self.print_line(str(e))

            self.stdscr.refresh()

        elif isprint(c):
            if len(self.searchText) == self.cols - 2:
                return
            self.searchText = self.searchText + chr(c)

        self.stdscr.addstr(
            self.rows - 1,
            0,
            self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
        )

        self.paintStatus(self.statusText + " %d" % len(self.searchText))
        self.stdscr.move(self.rows - 1, len(self.searchText))
        self.stdscr.refresh()

    def logPrefix(self):
        return "CursesStdIO"

    def close(self):
        """ clean up """

        curses.nocbreak()
        self.stdscr.keypad(0)
        curses.echo()
        curses.endwin()


class Callback:
    def __init__(self, stdio):
        self.stdio = stdio

    def on_line(self, text):
        self.stdio.print_line(text)


def main(stdscr):
    screen = CursesStdIO(stdscr)  # create Screen object

    callback = Callback(screen)

    screen.set_callback(callback)

    stdscr.refresh()
    reactor.addReader(screen)
    reactor.run()
    screen.close()


if __name__ == "__main__":
    curses.wrapper(main)
378 contrib/experiments/test_messaging.py Normal file
@@ -0,0 +1,378 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


""" This is an example of using the server to server implementation to do a
basic chat style thing. It accepts commands from stdin and outputs to stdout.

It assumes that ucids are of the form <user>@<domain>, and uses <domain> as
the address of the remote home server to hit.

Usage:
    python test_messaging.py <user>

Currently assumes the local address is localhost:<port>, where <port> is taken
from the domain part of the ucid.
"""


import argparse
import curses.wrapper
import json
import logging
import os
import re

import cursesio

from twisted.internet import defer, reactor
from twisted.python import log

from synapse.app.homeserver import SynapseHomeServer
from synapse.federation import ReplicationHandler
from synapse.federation.units import Pdu
from synapse.util import origin_from_ucid

# from synapse.logging.utils import log_function


logger = logging.getLogger("example")


def exception_errback(failure):
    logging.exception(failure)


class InputOutput:
    """ This is responsible for basic I/O so that a user can interact with
    the example app.
    """

    def __init__(self, screen, user):
        self.screen = screen
        self.user = user

    def set_home_server(self, server):
        self.server = server

    def on_line(self, line):
        """ This is where we process commands.
        """

        try:
            m = re.match(r"^join (\S+)$", line)
            if m:
                # The `sender` wants to join a room.
                (room_name,) = m.groups()
                self.print_line("%s joining %s" % (self.user, room_name))
                self.server.join_room(room_name, self.user, self.user)
                # self.print_line("OK.")
                return

            m = re.match(r"^invite (\S+) (\S+)$", line)
            if m:
                # `sender` wants to invite someone to a room
                room_name, invitee = m.groups()
                self.print_line("%s invited to %s" % (invitee, room_name))
                self.server.invite_to_room(room_name, self.user, invitee)
                # self.print_line("OK.")
                return

            m = re.match(r"^send (\S+) (.*)$", line)
            if m:
                # `sender` wants to message a room
                room_name, body = m.groups()
                self.print_line("%s send to %s" % (self.user, room_name))
                self.server.send_message(room_name, self.user, body)
                # self.print_line("OK.")
                return

            m = re.match(r"^backfill (\S+)$", line)
            if m:
                # we want to backfill a room
                (room_name,) = m.groups()
                self.print_line("backfill %s" % room_name)
                self.server.backfill(room_name)
                return

            self.print_line("Unrecognized command")

        except Exception as e:
            logger.exception(e)

    def print_line(self, text):
        self.screen.print_line(text)

    def print_log(self, text):
        self.screen.print_log(text)


class IOLoggerHandler(logging.Handler):
    def __init__(self, io):
        logging.Handler.__init__(self)
        self.io = io

    def emit(self, record):
        if record.levelno < logging.WARN:
            return

        msg = self.format(record)
        self.io.print_log(msg)


class Room:
    """ Used to store (in memory) the current membership state of a room, and
    which home servers we should send PDUs associated with the room to.
    """

    def __init__(self, room_name):
        self.room_name = room_name
        self.invited = set()
        self.participants = set()
        self.servers = set()

        self.oldest_server = None

        self.have_got_metadata = False

    def add_participant(self, participant):
        """ Someone has joined the room
        """
        self.participants.add(participant)
        self.invited.discard(participant)

        server = origin_from_ucid(participant)
        self.servers.add(server)

        if not self.oldest_server:
            self.oldest_server = server

    def add_invited(self, invitee):
        """ Someone has been invited to the room
        """
        self.invited.add(invitee)
        self.servers.add(origin_from_ucid(invitee))


class HomeServer(ReplicationHandler):
    """ A very basic home server implementation that allows people to join a
    room and then invite other people.
    """

    def __init__(self, server_name, replication_layer, output):
        self.server_name = server_name
        self.replication_layer = replication_layer
        self.replication_layer.set_handler(self)

        self.joined_rooms = {}

        self.output = output

    def on_receive_pdu(self, pdu):
        """ We just received a PDU
        """
        pdu_type = pdu.pdu_type

        if pdu_type == "sy.room.message":
            self._on_message(pdu)
        elif pdu_type == "sy.room.member" and "membership" in pdu.content:
            if pdu.content["membership"] == "join":
                self._on_join(pdu.context, pdu.state_key)
            elif pdu.content["membership"] == "invite":
                self._on_invite(pdu.origin, pdu.context, pdu.state_key)
        else:
            self.output.print_line(
                "#%s (unrec) %s = %s"
                % (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
            )

    def _on_message(self, pdu):
        """ We received a message
        """
        self.output.print_line(
            "#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
        )

    def _on_join(self, context, joinee):
        """ Someone has joined a room, either a remote user or a local user
        """
        room = self._get_or_create_room(context)
        room.add_participant(joinee)

        self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))

    def _on_invite(self, origin, context, invitee):
        """ Someone has been invited
        """
        room = self._get_or_create_room(context)
        room.add_invited(invitee)

        self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))

        if not room.have_got_metadata and origin is not self.server_name:
            logger.debug("Get room state")
            self.replication_layer.get_state_for_context(origin, context)
            room.have_got_metadata = True

    @defer.inlineCallbacks
    def send_message(self, room_name, sender, body):
        """ Send a message to a room!
        """
        destinations = yield self.get_servers_for_context(room_name)

        try:
            yield self.replication_layer.send_pdu(
                Pdu.create_new(
                    context=room_name,
                    pdu_type="sy.room.message",
                    content={"sender": sender, "body": body},
                    origin=self.server_name,
                    destinations=destinations,
                )
            )
        except Exception as e:
            logger.exception(e)

    @defer.inlineCallbacks
    def join_room(self, room_name, sender, joinee):
        """ Join a room!
        """
        self._on_join(room_name, joinee)

        destinations = yield self.get_servers_for_context(room_name)

        try:
            pdu = Pdu.create_new(
                context=room_name,
                pdu_type="sy.room.member",
                is_state=True,
                state_key=joinee,
                content={"membership": "join"},
                origin=self.server_name,
                destinations=destinations,
            )
            yield self.replication_layer.send_pdu(pdu)
        except Exception as e:
            logger.exception(e)

    @defer.inlineCallbacks
    def invite_to_room(self, room_name, sender, invitee):
        """ Invite someone to a room!
        """
        self._on_invite(self.server_name, room_name, invitee)

        destinations = yield self.get_servers_for_context(room_name)

        try:
            yield self.replication_layer.send_pdu(
                Pdu.create_new(
                    context=room_name,
                    is_state=True,
                    pdu_type="sy.room.member",
                    state_key=invitee,
                    content={"membership": "invite"},
                    origin=self.server_name,
                    destinations=destinations,
                )
            )
        except Exception as e:
            logger.exception(e)

    def backfill(self, room_name, limit=5):
        room = self.joined_rooms.get(room_name)

        if not room:
            return

        dest = room.oldest_server

        return self.replication_layer.backfill(dest, room_name, limit)

    def _get_room_remote_servers(self, room_name):
        return list(self.joined_rooms.setdefault(room_name).servers)

    def _get_or_create_room(self, room_name):
        return self.joined_rooms.setdefault(room_name, Room(room_name))

    def get_servers_for_context(self, context):
        return defer.succeed(
            self.joined_rooms.setdefault(context, Room(context)).servers
        )


def main(stdscr):
    parser = argparse.ArgumentParser()
    parser.add_argument("user", type=str)
    parser.add_argument("-v", "--verbose", action="count")
    args = parser.parse_args()

    user = args.user
    server_name = origin_from_ucid(user)

    # Set up logging

    root_logger = logging.getLogger()

    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
    )
    if not os.path.exists("logs"):
        os.makedirs("logs")
    fh = logging.FileHandler("logs/%s" % user)
    fh.setFormatter(formatter)

    root_logger.addHandler(fh)
    root_logger.setLevel(logging.DEBUG)

    # Hack: The only way to get it to stop logging to sys.stderr :(
    log.theLogPublisher.observers = []
    observer = log.PythonLoggingObserver()
    observer.start()

    # Set up synapse server

    curses_stdio = cursesio.CursesStdIO(stdscr)
    input_output = InputOutput(curses_stdio, user)

    curses_stdio.set_callback(input_output)

    app_hs = SynapseHomeServer(server_name, db_name="dbs/%s" % user)
    replication = app_hs.get_replication_layer()

    hs = HomeServer(server_name, replication, curses_stdio)

    input_output.set_home_server(hs)

    # Add input_output logger
    io_logger = IOLoggerHandler(input_output)
    io_logger.setFormatter(formatter)
    root_logger.addHandler(io_logger)

    # Start!

    try:
        port = int(server_name.split(":")[1])
    except Exception:
        port = 12345

    app_hs.get_http_server().start_listening(port)

    reactor.addReader(curses_stdio)

    reactor.run()


if __name__ == "__main__":
    curses.wrapper(main)
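The on_line handler above dispatches commands by trying one regex per command and falling through to "Unrecognized command". A reduced, dependency-free sketch of that dispatch pattern (hypothetical, for illustration only):

# Stand-alone sketch of the regex-per-command dispatch used by on_line above
# (hypothetical commands; no Synapse dependencies).
import re


def dispatch(line):
    m = re.match(r"^join (\S+)$", line)
    if m:
        return ("join", m.group(1))

    m = re.match(r"^send (\S+) (.*)$", line)
    if m:
        return ("send",) + m.groups()

    return ("unrecognized", line)


assert dispatch("join #test") == ("join", "#test")
assert dispatch("send #test hello world") == ("send", "#test", "hello world")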
@@ -1,6 +1,6 @@
 # Using the Synapse Grafana dashboard
 
 0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
-1. Have your Prometheus scrape your Synapse. https://matrix-org.github.io/synapse/latest/metrics-howto.html
+1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
 2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
-3. Set up required recording rules. [contrib/prometheus](../prometheus)
+3. Set up required recording rules. https://github.com/matrix-org/synapse/tree/master/contrib/prometheus
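As a quick sanity check for step 1, you can confirm that a metrics listener answers before pointing Prometheus at it; a rough sketch (the URL is an assumption — adjust host and port to match the `listeners` section of your Synapse config):

# Sanity-check a Synapse metrics endpoint before wiring Prometheus up to it.
# The URL below is an assumption: adjust host/port to your listeners config.
from urllib.request import urlopen

METRICS_URL = "http://localhost:9000/_synapse/metrics"

with urlopen(METRICS_URL, timeout=5) as resp:
    body = resp.read().decode("utf-8", "replace")

# Prometheus text format: one sample per line, e.g. lines starting "synapse_".
synapse_metrics = [l for l in body.splitlines() if l.startswith("synapse_")]
print("endpoint OK, %d synapse_* series exposed" % len(synapse_metrics))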
(File diff suppressed because it is too large.)
@@ -1,3 +1,11 @@
+import argparse
+import cgi
+import datetime
+import json
+
+import pydot
+import urllib2
+
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,25 +20,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import argparse
-import cgi
-import datetime
-import json
-import urllib.request
-from typing import List
-
-import pydot
-
-
-def make_name(pdu_id: str, origin: str) -> str:
-    return f"{pdu_id}@{origin}"
-
-
-def make_graph(pdus: List[dict], filename_prefix: str) -> None:
-    """
-    Generate a dot and SVG file for a graph of events in the room based on the
-    topological ordering by querying a homeserver.
-    """
+def make_name(pdu_id, origin):
+    return "%s@%s" % (pdu_id, origin)
+
+
+def make_graph(pdus, room, filename_prefix):
     pdu_map = {}
     node_map = {}
 
@@ -116,10 +111,10 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     graph.write_svg("%s.svg" % filename_prefix, prog="dot")
 
 
-def get_pdus(host: str, room: str) -> List[dict]:
+def get_pdus(host, room):
     transaction = json.loads(
-        urllib.request.urlopen(
-            f"http://{host}/_matrix/federation/v1/context/{room}/"
+        urllib2.urlopen(
+            "http://%s/_matrix/federation/v1/context/%s/" % (host, room)
         ).read()
     )
 
@@ -146,4 +141,4 @@ if __name__ == "__main__":
 
     pdus = get_pdus(host, room)
 
-    make_graph(pdus, prefix)
+    make_graph(pdus, room, prefix)
@@ -14,31 +14,22 @@
 
 
 import argparse
+import cgi
 import datetime
-import html
 import json
 import sqlite3
 
 import pydot
 
-from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
-from synapse.events import make_event_from_dict
+from synapse.events import FrozenEvent
 from synapse.util.frozenutils import unfreeze
 
 
-def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None:
-    """
-    Generate a dot and SVG file for a graph of events in the room based on the
-    topological ordering by reading from a Synapse SQLite database.
-    """
+def make_graph(db_name, room_id, file_prefix, limit):
     conn = sqlite3.connect(db_name)
 
-    sql = "SELECT room_version FROM rooms WHERE room_id = ?"
-    c = conn.execute(sql, (room_id,))
-    room_version = KNOWN_ROOM_VERSIONS[c.fetchone()[0]]
-
     sql = (
-        "SELECT json, internal_metadata FROM event_json as j "
+        "SELECT json FROM event_json as j "
         "INNER JOIN events as e ON e.event_id = j.event_id "
         "WHERE j.room_id = ?"
     )
@@ -52,10 +43,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
 
     c = conn.execute(sql, args)
 
-    events = [
-        make_event_from_dict(json.loads(e[0]), room_version, json.loads(e[1]))
-        for e in c.fetchall()
-    ]
+    events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
 
     events.sort(key=lambda e: e.depth)
 
@@ -96,7 +84,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
             "name": event.event_id,
             "type": event.type,
             "state_key": event.get("state_key", None),
-            "content": html.escape(content, quote=True),
+            "content": cgi.escape(content, quote=True),
             "time": t,
             "depth": event.depth,
             "state_group": state_group,
@@ -108,11 +96,11 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
         graph.add_node(node)
 
     for event in events:
-        for prev_id in event.prev_event_ids():
+        for prev_id, _ in event.prev_events:
            try:
                end_node = node_map[prev_id]
            except Exception:
-                end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
 
                node_map[prev_id] = end_node
                graph.add_node(end_node)
@@ -124,7 +112,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
         if len(event_ids) <= 1:
             continue
 
-        cluster = pydot.Cluster(str(group), label=f"<State Group: {str(group)}>")
+        cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
 
         for event_id in event_ids:
             cluster.add_node(node_map[event_id])
@@ -138,7 +126,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Generate a PDU graph for a given room by talking "
-        "to the given Synapse SQLite file to get the list of PDUs. \n"
+        "to the given homeserver to get the list of PDUs. \n"
         "Requires pydot."
     )
     parser.add_argument(
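The main change in this diff is event parsing: the newer side must look up the room's version before deserialising, since the event format is room-version dependent, while the older side could construct a FrozenEvent directly. A reduced sketch of the version-aware lookup (assumes a Synapse checkout on the path; `row` mirrors the (json, internal_metadata) tuple returned by the query above):

# Reduced sketch of the version-aware parsing on the newer side of this diff.
# Assumption: Synapse is importable; `row` is (event_json, internal_metadata).
import json

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import make_event_from_dict


def parse_row(row, room_version_id):
    # Map the room's version string (e.g. "6") to a RoomVersion object,
    # then build an event of the matching format.
    room_version = KNOWN_ROOM_VERSIONS[room_version_id]
    return make_event_from_dict(json.loads(row[0]), room_version, json.loads(row[1]))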
@@ -1,3 +1,13 @@
+import argparse
+import cgi
+import datetime
+
+import pydot
+import simplejson as json
+
+from synapse.events import FrozenEvent
+from synapse.util.frozenutils import unfreeze
+
 # Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,35 +22,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import argparse
-import datetime
-import html
-import json
-
-import pydot
-
-from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
-from synapse.events import make_event_from_dict
-from synapse.util.frozenutils import unfreeze
-
-
-def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
-    """
-    Generate a dot and SVG file for a graph of events in the room based on the
-    topological ordering by reading line-delimited JSON from a file.
-    """
+def make_graph(file_name, room_id, file_prefix, limit):
     print("Reading lines")
     with open(file_name) as f:
         lines = f.readlines()
 
     print("Read lines")
 
-    # Figure out the room version, assume the first line is the create event.
-    room_version = KNOWN_ROOM_VERSIONS[
-        json.loads(lines[0]).get("content", {}).get("room_version")
-    ]
-
-    events = [make_event_from_dict(json.loads(line), room_version) for line in lines]
+    events = [FrozenEvent(json.loads(line)) for line in lines]
 
     print("Loaded events.")
 
@@ -76,8 +66,8 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
         content.append(
             "<b>%s</b>: %s,"
             % (
-                html.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
-                html.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
+                cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
+                cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
             )
         )
 
@@ -111,11 +101,11 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
     print("Created Nodes")
 
     for event in events:
-        for prev_id in event.prev_event_ids():
+        for prev_id, _ in event.prev_events:
            try:
                end_node = node_map[prev_id]
            except Exception:
-                end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
 
                node_map[prev_id] = end_node
                graph.add_node(end_node)
@@ -149,7 +139,8 @@ if __name__ == "__main__":
     )
     parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
     parser.add_argument("event_file")
+    parser.add_argument("room")
 
     args = parser.parse_args()
 
-    make_graph(args.event_file, args.prefix, args.limit)
+    make_graph(args.event_file, args.room, args.prefix, args.limit)
298 contrib/jitsimeetbridge/jitsimeetbridge.py Normal file
@@ -0,0 +1,298 @@
#!/usr/bin/env python

"""
This is an attempt at bridging matrix clients into a Jitsi meet room via Matrix
video call. It uses hard-coded xml strings over XMPP BOSH. It can display one
of the streams from the Jitsi bridge until the second lot of SDP comes down and
we set the remote SDP, at which point the stream ends. Our video never gets to
the bridge.

Requires:
npm install jquery jsdom
"""
import json
import subprocess
import time

import gevent
import grequests
from BeautifulSoup import BeautifulSoup

ACCESS_TOKEN = ""

MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
MYUSERNAME = "@davetest:matrix.org"

HTTPBIND = "https://meet.jit.si/http-bind"
# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
# ROOMNAME = "matrix"
ROOMNAME = "pibble"

HOST = "guest.jit.si"
# HOST="jitsi.vuc.me"

TURNSERVER = "turn.guest.jit.si"
# TURNSERVER="turn.jitsi.vuc.me"

ROOMDOMAIN = "meet.jit.si"
# ROOMDOMAIN="conference.jitsi.vuc.me"


class TrivialMatrixClient:
    def __init__(self, access_token):
        self.token = None
        self.access_token = access_token

    def getEvent(self):
        while True:
            url = (
                MATRIXBASE
                + "events?access_token="
                + self.access_token
                + "&timeout=60000"
            )
            if self.token:
                url += "&from=" + self.token
            req = grequests.get(url)
            resps = grequests.map([req])
            obj = json.loads(resps[0].content)
            print("incoming from matrix", obj)
            if "end" not in obj:
                continue
            self.token = obj["end"]
            if len(obj["chunk"]):
                return obj["chunk"][0]

    def joinRoom(self, roomId):
        url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
        print(url)
        headers = {"Content-Type": "application/json"}
        req = grequests.post(url, headers=headers, data="{}")
        resps = grequests.map([req])
        obj = json.loads(resps[0].content)
        print("response: ", obj)

    def sendEvent(self, roomId, evType, event):
        url = (
            MATRIXBASE
            + "rooms/"
            + roomId
            + "/send/"
            + evType
            + "?access_token="
            + self.access_token
        )
        print(url)
        print(json.dumps(event))
        headers = {"Content-Type": "application/json"}
        req = grequests.post(url, headers=headers, data=json.dumps(event))
        resps = grequests.map([req])
        obj = json.loads(resps[0].content)
        print("response: ", obj)


xmppClients = {}


def matrixLoop():
    while True:
        ev = matrixCli.getEvent()
        print(ev)
        if ev["type"] == "m.room.member":
            print("membership event")
            if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
                roomId = ev["room_id"]
                print("joining room %s" % (roomId))
                matrixCli.joinRoom(roomId)
        elif ev["type"] == "m.room.message":
            if ev["room_id"] in xmppClients:
                print("already have a bridge for that user, ignoring")
                continue
            print("got message, connecting")
            xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
            gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
        elif ev["type"] == "m.call.invite":
            print("Incoming call")
            # sdp = ev['content']['offer']['sdp']
            # print "sdp: %s" % (sdp)
            # xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
            # gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
        elif ev["type"] == "m.call.answer":
            print("Call answered")
            sdp = ev["content"]["answer"]["sdp"]
            if ev["room_id"] not in xmppClients:
                print("We didn't have a call for that room")
                continue
            # should probably check call ID too
            xmppCli = xmppClients[ev["room_id"]]
            xmppCli.sendAnswer(sdp)
        elif ev["type"] == "m.call.hangup":
            if ev["room_id"] in xmppClients:
                xmppClients[ev["room_id"]].stop()
                del xmppClients[ev["room_id"]]


class TrivialXmppClient:
    def __init__(self, matrixRoom, userId):
        self.rid = 0
        self.matrixRoom = matrixRoom
        self.userId = userId
        self.running = True

    def stop(self):
        self.running = False

    def nextRid(self):
        self.rid += 1
        return "%d" % (self.rid)

    def sendIq(self, xml):
        fullXml = (
            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
            % (self.nextRid(), self.sid, xml)
        )
        # print "\t>>>%s" % (fullXml)
        return self.xmppPoke(fullXml)

    def xmppPoke(self, xml):
        headers = {"Content-Type": "application/xml"}
        req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
        resps = grequests.map([req])
        obj = BeautifulSoup(resps[0].content)
        return obj

    def sendAnswer(self, answer):
        print("sdp from matrix client", answer)
        p = subprocess.Popen(
            ["node", "unjingle/unjingle.js", "--sdp"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        jingle, out_err = p.communicate(answer)
        jingle = jingle % {
            "tojid": self.callfrom,
            "action": "session-accept",
            "initiator": self.callfrom,
            "responder": self.jid,
            "sid": self.callsid,
        }
        print("answer jingle from sdp", jingle)
        res = self.sendIq(jingle)
        print("reply from answer: ", res)

        self.ssrcs = {}
        jingleSoup = BeautifulSoup(jingle)
        for cont in jingleSoup.iq.jingle.findAll("content"):
            if cont.description:
                self.ssrcs[cont["name"]] = cont.description["ssrc"]
        print("my ssrcs:", self.ssrcs)

        gevent.joinall([gevent.spawn(self.advertiseSsrcs)])

    def advertiseSsrcs(self):
        time.sleep(7)
        print("SSRC spammer started")
        while self.running:
            ssrcMsg = (
                "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
                % {
                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
                    "nick": self.userId,
                    "assrc": self.ssrcs["audio"],
                    "vssrc": self.ssrcs["video"],
                }
            )
            res = self.sendIq(ssrcMsg)
            print("reply from ssrc announce: ", res)
            time.sleep(10)

    def xmppLoop(self):
        self.matrixCallId = time.time()
        res = self.xmppPoke(
            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
            % (self.nextRid(), HOST)
        )

        print(res)
        self.sid = res.body["sid"]
        print("sid %s" % (self.sid))

        res = self.sendIq(
            "<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
        )

        res = self.xmppPoke(
            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
            % (self.nextRid(), self.sid, HOST)
        )

        res = self.sendIq(
            "<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
        )
        print(res)

        self.jid = res.body.iq.bind.jid.string
        print("jid: %s" % (self.jid))
        self.shortJid = self.jid.split("-")[0]

        res = self.sendIq(
            "<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
        )

        # randomthing = res.body.iq['to']
        # whatsitpart = randomthing.split('-')[0]

        # print "other random bind thing: %s" % (randomthing)

        # advertise presence to the jitsi room, with our nick
        res = self.sendIq(
            "<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
            % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
        )
        self.muc = {"users": []}
        for p in res.body.findAll("presence"):
            u = {}
            u["shortJid"] = p["from"].split("/")[1]
            if p.c and p.c.nick:
                u["nick"] = p.c.nick.string
            self.muc["users"].append(u)
        print("muc: ", self.muc)

        # wait for stuff
        while True:
            print("waiting...")
            res = self.sendIq("")
            print("got from stream: ", res)
            if res.body.iq:
                jingles = res.body.iq.findAll("jingle")
                if len(jingles):
                    self.callfrom = res.body.iq["from"]
                    self.handleInvite(jingles[0])
            elif "type" in res.body and res.body["type"] == "terminate":
                self.running = False
                del xmppClients[self.matrixRoom]
                return

    def handleInvite(self, jingle):
        self.initiator = jingle["initiator"]
        self.callsid = jingle["sid"]
        p = subprocess.Popen(
            ["node", "unjingle/unjingle.js", "--jingle"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        print("raw jingle invite", str(jingle))
        sdp, out_err = p.communicate(str(jingle))
        print("transformed remote offer sdp", sdp)
        inviteEvent = {
            "offer": {"type": "offer", "sdp": sdp},
            "call_id": self.matrixCallId,
            "version": 0,
            "lifetime": 30000,
        }
        matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)


matrixCli = TrivialMatrixClient(ACCESS_TOKEN)  # Undefined name

gevent.joinall([gevent.spawn(matrixLoop)])
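TrivialMatrixClient.getEvent above is a plain long-poll loop against the (long-deprecated) v1 /events API: block for up to 60 seconds, then resume from the returned `end` token. The same loop written with the synchronous `requests` library, as a hypothetical sketch:

# Hypothetical sketch: the long-poll loop from getEvent above, using the
# synchronous `requests` library. The v1 /events API shown is deprecated.
import requests

BASE = "https://matrix.org/_matrix/client/api/v1/"


def poll_events(access_token):
    token = None
    while True:
        params = {"access_token": access_token, "timeout": 60000}
        if token is not None:
            params["from"] = token
        obj = requests.get(BASE + "events", params=params).json()
        if "end" not in obj:
            continue
        token = obj["end"]  # resume point for the next poll
        for ev in obj.get("chunk", []):
            yield ev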
188 contrib/jitsimeetbridge/syweb-jitsi-conference.patch Normal file
@@ -0,0 +1,188 @@
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
index 9fbfff0..dc68077 100644
--- a/syweb/webclient/app/components/matrix/matrix-call.js
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
@@ -16,6 +16,45 @@ limitations under the License.
 
 'use strict';
 
+
+function sendKeyframe(pc) {
+    console.log('sendkeyframe', pc.iceConnectionState);
+    if (pc.iceConnectionState !== 'connected') return; // safe...
+    pc.setRemoteDescription(
+        pc.remoteDescription,
+        function () {
+            pc.createAnswer(
+                function (modifiedAnswer) {
+                    pc.setLocalDescription(
+                        modifiedAnswer,
+                        function () {
+                            // noop
+                        },
+                        function (error) {
+                            console.log('triggerKeyframe setLocalDescription failed', error);
+                            messageHandler.showError();
+                        }
+                    );
+                },
+                function (error) {
+                    console.log('triggerKeyframe createAnswer failed', error);
+                    messageHandler.showError();
+                }
+            );
+        },
+        function (error) {
+            console.log('triggerKeyframe setRemoteDescription failed', error);
+            messageHandler.showError();
+        }
+    );
+}
+
+
+
+
+
+
+
 var forAllVideoTracksOnStream = function(s, f) {
     var tracks = s.getVideoTracks();
     for (var i = 0; i < tracks.length; i++) {
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
     }
 
     // FIXME: we should prevent any calls from being placed or accepted before this has finished
-    MatrixCall.getTurnServer();
+    //MatrixCall.getTurnServer();
 
     MatrixCall.CALL_TIMEOUT = 60000;
     MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
         pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
         pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
         pc.onaddstream = function(s) { self.onAddStream(s); };
+
+        var datachan = pc.createDataChannel('RTCDataChannel', {
+            reliable: false
+        });
+        console.log("data chan: "+datachan);
+        datachan.onopen = function() {
+            console.log("data channel open");
+        };
+        datachan.onmessage = function() {
+            console.log("data channel message");
+        };
+        pc.ondatachannel = function(event) {
+            console.log("have data channel");
+            event.channel.binaryType = 'blob';
+        };
+
         return pc;
     }
 
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
         }, this.msg.lifetime - event.age);
     };
 
+    MatrixCall.prototype.receivedInvite = function(event) {
+        console.log("Got second invite for call "+this.call_id);
+        this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
+    };
+
+
     // perverse as it may seem, sometimes we want to instantiate a call with a hangup message
     // (because when getting the state of the room on load, events come in reverse order and
     // we want to remember that a call has been hung up)
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
             'mandatory': {
                 'OfferToReceiveAudio': true,
                 'OfferToReceiveVideo': this.type == 'video'
-            },
+            }
         };
         this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
         // This can't be in an apply() because it's called by a predecessor call under glare conditions :(
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
     MatrixCall.prototype.gotLocalIceCandidate = function(event) {
         if (event.candidate) {
             console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
-            this.sendCandidate(event.candidate);
-        }
+            //this.sendCandidate(event.candidate);
+        } else {
+            console.log("have all candidates, sending answer");
+            var content = {
+                version: 0,
+                call_id: this.call_id,
+                answer: this.peerConn.localDescription
+            };
+            this.sendEventWithRetry('m.call.answer', content);
+            var self = this;
+            $rootScope.$apply(function() {
+                self.state = 'connecting';
+            });
+        }
     }
 
     MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
         console.log("Created answer: "+description);
         var self = this;
         this.peerConn.setLocalDescription(description, function() {
-            var content = {
-                version: 0,
-                call_id: self.call_id,
-                answer: self.peerConn.localDescription
-            };
-            self.sendEventWithRetry('m.call.answer', content);
-            $rootScope.$apply(function() {
-                self.state = 'connecting';
-            });
         }, function() { console.log("Error setting local description!"); } );
     };
 
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
             $rootScope.$apply(function() {
                 self.state = 'connected';
                 self.didConnect = true;
+                /*$timeout(function() {
+                    sendKeyframe(self.peerConn);
+                }, 1000);*/
             });
         } else if (this.peerConn.iceConnectionState == 'failed') {
             this.hangup('ice_failed');
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
 
     MatrixCall.prototype.onRemoteStreamEnded = function(event) {
         console.log("Remote stream ended");
+        return;
         var self = this;
         $rootScope.$apply(function() {
             self.state = 'ended';
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
index 55dbbf5..272fa27 100644
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
             return;
         }
 
+        // do we already have an entry for this call ID?
+        var existingEntry = matrixPhoneService.allCalls[msg.call_id];
+        if (existingEntry) {
+            existingEntry.receivedInvite(msg);
+            return;
+        }
+
         var call = undefined;
         if (!isLive) {
             // if this event wasn't live then this call may already be over
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
                 call.hangup();
             }
         } else {
-            $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
+           $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
         }
     } else if (event.type == 'm.call.answer') {
         var call = matrixPhoneService.allCalls[msg.call_id];
712 contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js Normal file
@@ -0,0 +1,712 @@
/* jshint -W117 */
// SDP STUFF
function SDP(sdp) {
    this.media = sdp.split('\r\nm=');
    for (var i = 1; i < this.media.length; i++) {
        this.media[i] = 'm=' + this.media[i];
        if (i != this.media.length - 1) {
            this.media[i] += '\r\n';
        }
    }
    this.session = this.media.shift() + '\r\n';
    this.raw = this.session + this.media.join('');
}

exports.SDP = SDP;
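The constructor above splits the raw blob into a session preamble plus one chunk per `m=` section, keeping a reassembled copy in `raw`. For reference, the same split expressed in Python (illustration only; not part of the bridge):

# Illustration only: Python equivalent of the session/media split performed
# by the SDP() constructor above.
def split_sdp(raw):
    parts = raw.split("\r\nm=")
    media = ["m=" + p for p in parts[1:]]
    # every media section except the last keeps its trailing CRLF
    media = [m + "\r\n" for m in media[:-1]] + media[-1:]
    session = parts[0] + "\r\n"
    return session, media


raw = "v=0\r\ns=-\r\nm=audio 9 RTP/AVP 0\r\nm=video 9 RTP/AVP 96"
session, media = split_sdp(raw)
assert session + "".join(media) == raw  # round-trips, as the JS `raw` does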
var jsdom = require("jsdom");
var window = jsdom.jsdom().parentWindow;
var $ = require('jquery')(window);

var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;

/**
 * Returns map of MediaChannel mapped per channel idx.
 */
SDP.prototype.getMediaSsrcMap = function() {
    var self = this;
    var media_ssrcs = {};
    for (channelNum = 0; channelNum < self.media.length; channelNum++) {
        modified = true;
        tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
        var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
        var channel = new MediaChannel(channelNum, type);
        media_ssrcs[channelNum] = channel;
        tmp.forEach(function (line) {
            var linessrc = line.substring(7).split(' ')[0];
            // allocate new ChannelSsrc
            if(!channel.ssrcs[linessrc]) {
                channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
            }
            channel.ssrcs[linessrc].lines.push(line);
        });
        tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
        tmp.forEach(function(line){
            var semantics = line.substr(0, idx).substr(13);
            var ssrcs = line.substr(14 + semantics.length).split(' ');
            if (ssrcs.length != 0) {
                var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
                channel.ssrcGroups.push(ssrcGroup);
            }
        });
    }
    return media_ssrcs;
};
/**
 * Returns <tt>true</tt> if this SDP contains given SSRC.
 * @param ssrc the ssrc to check.
 * @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
 */
SDP.prototype.containsSSRC = function(ssrc) {
    var channels = this.getMediaSsrcMap();
    var contains = false;
    Object.keys(channels).forEach(function(chNumber){
        var channel = channels[chNumber];
        //console.log("Check", channel, ssrc);
        if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
            contains = true;
        }
    });
    return contains;
};

/**
 * Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
 * @param otherSdp the other SDP to check ssrc with.
 */
SDP.prototype.getNewMedia = function(otherSdp) {

    // this could be useful in Array.prototype.
    function arrayEquals(array) {
        // if the other array is a falsy value, return
        if (!array)
            return false;

        // compare lengths - can save a lot of time
        if (this.length != array.length)
            return false;

        for (var i = 0, l=this.length; i < l; i++) {
            // Check if we have nested arrays
            if (this[i] instanceof Array && array[i] instanceof Array) {
                // recurse into the nested arrays
                if (!this[i].equals(array[i]))
                    return false;
            }
            else if (this[i] != array[i]) {
                // Warning - two different object instances will never be equal: {x:20} != {x:20}
                return false;
            }
        }
        return true;
    }

    var myMedia = this.getMediaSsrcMap();
    var othersMedia = otherSdp.getMediaSsrcMap();
    var newMedia = {};
    Object.keys(othersMedia).forEach(function(channelNum) {
        var myChannel = myMedia[channelNum];
        var othersChannel = othersMedia[channelNum];
        if(!myChannel && othersChannel) {
            // Add whole channel
            newMedia[channelNum] = othersChannel;
            return;
        }
        // Look for new ssrcs accross the channel
        Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
            if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
                // Allocate channel if we've found ssrc that doesn't exist in our channel
                if(!newMedia[channelNum]){
                    newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
                }
                newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
            }
        });

        // Look for new ssrc groups across the channels
        othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){

            // try to match the other ssrc-group with an ssrc-group of ours
            var matched = false;
            for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
                var mySsrcGroup = myChannel.ssrcGroups[i];
                if (otherSsrcGroup.semantics == mySsrcGroup.semantics
                    && arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
                    matched = true;
                    break;
                }
            }

            if (!matched) {
                // Allocate channel if we've found an ssrc-group that doesn't
                // exist in our channel

                if(!newMedia[channelNum]){
                    newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
                }
                newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
            }
        });
    });
    return newMedia;
};

// remove iSAC and CN from SDP
SDP.prototype.mangle = function () {
    var i, j, mline, lines, rtpmap, newdesc;
    for (i = 0; i < this.media.length; i++) {
        lines = this.media[i].split('\r\n');
        lines.pop(); // remove empty last element
        mline = SDPUtil.parse_mline(lines.shift());
        if (mline.media != 'audio')
            continue;
        newdesc = '';
        mline.fmt.length = 0;
        for (j = 0; j < lines.length; j++) {
            if (lines[j].substr(0, 9) == 'a=rtpmap:') {
                rtpmap = SDPUtil.parse_rtpmap(lines[j]);
                if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
                    continue;
                mline.fmt.push(rtpmap.id);
                newdesc += lines[j] + '\r\n';
            } else {
                newdesc += lines[j] + '\r\n';
            }
        }
        this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
        this.media[i] += newdesc;
    }
    this.raw = this.session + this.media.join('');
};

// remove lines matching prefix from session section
SDP.prototype.removeSessionLines = function(prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.session, prefix);
    lines.forEach(function(line) {
        self.session = self.session.replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
}
// remove lines matching prefix from a media section specified by mediaindex
// TODO: non-numeric mediaindex could match mid
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
    lines.forEach(function(line) {
        self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
}

// add content's to a jingle element
SDP.prototype.toJingle = function (elem, thecreator) {
    var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
    var self = this;
    // new bundle plan
    if (SDPUtil.find_line(this.session, 'a=group:')) {
        lines = SDPUtil.find_lines(this.session, 'a=group:');
        for (i = 0; i < lines.length; i++) {
            tmp = lines[i].split(' ');
            var semantics = tmp.shift().substr(8);
            elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
            for (j = 0; j < tmp.length; j++) {
                elem.c('content', {name: tmp[j]}).up();
            }
            elem.up();
        }
    }
    // old bundle plan, to be removed
    var bundle = [];
    if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
        bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
        bundle.shift();
    }
    for (i = 0; i < this.media.length; i++) {
        mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
        if (!(mline.media === 'audio' ||
              mline.media === 'video' ||
              mline.media === 'application'))
        {
            continue;
        }
        if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
            ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
        } else {
            ssrc = false;
        }

        elem.c('content', {creator: thecreator, name: mline.media});
        if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
            // prefer identifier from a=mid if present
            var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
            elem.attrs({ name: mid });

            // old BUNDLE plan, to be removed
            if (bundle.indexOf(mid) !== -1) {
                elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
                bundle.splice(bundle.indexOf(mid), 1);
            }
        }

        if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
        {
            elem.c('description',
                   {xmlns: 'urn:xmpp:jingle:apps:rtp:1',
                    media: mline.media });
            if (ssrc) {
                elem.attrs({ssrc: ssrc});
            }
            for (j = 0; j < mline.fmt.length; j++) {
                rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
                elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
                // put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
                if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
                    tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
                    for (k = 0; k < tmp.length; k++) {
                        elem.c('parameter', tmp[k]).up();
                    }
                }
                this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb

                elem.up();
            }
            if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
                elem.c('encryption', {required: 1});
                var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
                crypto.forEach(function(line) {
                    elem.c('crypto', SDPUtil.parse_crypto(line)).up();
                });
                elem.up(); // end of encryption
            }

            if (ssrc) {
                // new style mapping
                elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                // FIXME: group by ssrc and support multiple different ssrcs
                var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
                ssrclines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var linessrc = line.substr(0, idx).substr(7);
                    if (linessrc != ssrc) {
                        elem.up();
                        ssrc = linessrc;
                        elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                    }
                    var kv = line.substr(idx + 1);
                    elem.c('parameter');
                    if (kv.indexOf(':') == -1) {
                        elem.attrs({ name: kv });
                    } else {
                        elem.attrs({ name: kv.split(':', 2)[0] });
                        elem.attrs({ value: kv.split(':', 2)[1] });
                    }
                    elem.up();
                });
                elem.up();

                // old proprietary mapping, to be removed at some point
                tmp = SDPUtil.parse_ssrc(this.media[i]);
                tmp.xmlns = 'http://estos.de/ns/ssrc';
                tmp.ssrc = ssrc;
                elem.c('ssrc', tmp).up(); // ssrc is part of description

                // XEP-0339 handle ssrc-group attributes
                var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
                ssrc_group_lines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var semantics = line.substr(0, idx).substr(13);
                    var ssrcs = line.substr(14 + semantics.length).split(' ');
                    if (ssrcs.length != 0) {
                        elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                        ssrcs.forEach(function(ssrc) {
                            elem.c('source', { ssrc: ssrc })
                                .up();
                        });
                        elem.up();
                    }
                });
            }

            if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
                elem.c('rtcp-mux').up();
            }

            // XEP-0293 -- map a=rtcp-fb:*
            this.RtcpFbToJingle(i, elem, '*');

            // XEP-0294
            if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
                lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
                for (j = 0; j < lines.length; j++) {
                    tmp = SDPUtil.parse_extmap(lines[j]);
                    elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
                                           uri: tmp.uri,
                                           id: tmp.value });
                    if (tmp.hasOwnProperty('direction')) {
                        switch (tmp.direction) {
                        case 'sendonly':
                            elem.attrs({senders: 'responder'});
                            break;
                        case 'recvonly':
                            elem.attrs({senders: 'initiator'});
                            break;
                        case 'sendrecv':
                            elem.attrs({senders: 'both'});
                            break;
                        case 'inactive':
                            elem.attrs({senders: 'none'});
                            break;
                        }
                    }
                    // TODO: handle params
                    elem.up();
                }
            }
            elem.up(); // end of description
        }

        // map ice-ufrag/pwd, dtls fingerprint, candidates
        this.TransportToJingle(i, elem);
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
|
||||||
|
elem.attrs({senders: 'both'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'initiator'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'responder'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
|
||||||
|
elem.attrs({senders: 'none'});
|
||||||
|
}
|
||||||
|
if (mline.port == '0') {
|
||||||
|
// estos hack to reject an m-line
|
||||||
|
elem.attrs({senders: 'rejected'});
|
||||||
|
}
|
||||||
|
elem.up(); // end of content
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
return elem;
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.TransportToJingle = function (mediaindex, elem) {
|
||||||
|
var i = mediaindex;
|
||||||
|
var tmp;
|
||||||
|
var self = this;
|
||||||
|
elem.c('transport');
|
||||||
|
|
||||||
|
// XEP-0343 DTLS/SCTP
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
|
||||||
|
{
|
||||||
|
var sctpmap = SDPUtil.find_line(
|
||||||
|
this.media[i], 'a=sctpmap:', self.session);
|
||||||
|
if (sctpmap)
|
||||||
|
{
|
||||||
|
var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
|
||||||
|
elem.c('sctpmap',
|
||||||
|
{
|
||||||
|
xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
|
||||||
|
number: sctpAttrs[0], /* SCTP port */
|
||||||
|
protocol: sctpAttrs[1], /* protocol */
|
||||||
|
});
|
||||||
|
// Optional stream count attribute
|
||||||
|
if (sctpAttrs.length > 2)
|
||||||
|
elem.attrs({ streams: sctpAttrs[2]});
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// XEP-0320
|
||||||
|
var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
|
||||||
|
fingerprints.forEach(function(line) {
|
||||||
|
tmp = SDPUtil.parse_fingerprint(line);
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
|
||||||
|
elem.c('fingerprint').t(tmp.fingerprint);
|
||||||
|
delete tmp.fingerprint;
|
||||||
|
line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
|
||||||
|
if (line) {
|
||||||
|
tmp.setup = line.substr(8);
|
||||||
|
}
|
||||||
|
elem.attrs(tmp);
|
||||||
|
elem.up(); // end of fingerprint
|
||||||
|
});
|
||||||
|
tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
|
||||||
|
if (tmp) {
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
|
||||||
|
elem.attrs(tmp);
|
||||||
|
// XEP-0176
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elem.up(); // end of transport
|
||||||
|
}
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
var tmp = SDPUtil.parse_rtcpfb(line);
|
||||||
|
if (tmp.type == 'trr-int') {
|
||||||
|
elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
|
||||||
|
elem.up();
|
||||||
|
} else {
|
||||||
|
elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
|
||||||
|
if (tmp.params.length > 0) {
|
||||||
|
elem.attrs({'subtype': tmp.params[0]});
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
|
||||||
|
var media = '';
|
||||||
|
var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
|
||||||
|
if (tmp.attr('value')) {
|
||||||
|
media += tmp.attr('value');
|
||||||
|
} else {
|
||||||
|
media += '0';
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
|
||||||
|
if ($(this).attr('subtype')) {
|
||||||
|
media += ' ' + $(this).attr('subtype');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
|
// construct an SDP from a jingle stanza
|
||||||
|
SDP.prototype.fromJingle = function (jingle) {
|
||||||
|
var self = this;
|
||||||
|
this.raw = 'v=0\r\n' +
|
||||||
|
'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' +// FIXME
|
||||||
|
's=-\r\n' +
|
||||||
|
't=0 0\r\n';
|
||||||
|
// http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
|
||||||
|
if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
|
||||||
|
$(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
|
||||||
|
// temporary namespace, not to be used. to be removed soon.
|
||||||
|
$(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (group.getAttribute('type') !== null && contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// for backward compability, to be removed soon
|
||||||
|
// assume all contents are in the same bundle group, can be improved upon later
|
||||||
|
var bundle = $(jingle).find('>content').filter(function (idx, content) {
|
||||||
|
//elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
|
||||||
|
return $(content).find('>bundle').length > 0;
|
||||||
|
}).map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (bundle.length) {
|
||||||
|
this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.session = this.raw;
|
||||||
|
jingle.find('>content').each(function () {
|
||||||
|
var m = self.jingle2media($(this));
|
||||||
|
self.media.push(m);
|
||||||
|
});
|
||||||
|
|
||||||
|
// reconstruct msid-semantic -- apparently not necessary
|
||||||
|
/*
|
||||||
|
var msid = SDPUtil.parse_ssrc(this.raw);
|
||||||
|
if (msid.hasOwnProperty('mslabel')) {
|
||||||
|
this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
};
|
||||||
|
|
||||||
|
// translate a jingle content element into an an SDP media part
|
||||||
|
SDP.prototype.jingle2media = function (content) {
|
||||||
|
var media = '',
|
||||||
|
desc = content.find('description'),
|
||||||
|
ssrc = desc.attr('ssrc'),
|
||||||
|
self = this,
|
||||||
|
tmp;
|
||||||
|
var sctp = content.find(
|
||||||
|
'>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');
|
||||||
|
|
||||||
|
tmp = { media: desc.attr('media') };
|
||||||
|
tmp.port = '1';
|
||||||
|
if (content.attr('senders') == 'rejected') {
|
||||||
|
// estos hack to reject an m-line.
|
||||||
|
tmp.port = '0';
|
||||||
|
}
|
||||||
|
if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
|
||||||
|
if (sctp.length)
|
||||||
|
tmp.proto = 'DTLS/SCTP';
|
||||||
|
else
|
||||||
|
tmp.proto = 'RTP/SAVPF';
|
||||||
|
} else {
|
||||||
|
tmp.proto = 'RTP/AVPF';
|
||||||
|
}
|
||||||
|
if (!sctp.length)
|
||||||
|
{
|
||||||
|
tmp.fmt = desc.find('payload-type').map(
|
||||||
|
function () { return this.getAttribute('id'); }).get();
|
||||||
|
media += SDPUtil.build_mline(tmp) + '\r\n';
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
|
||||||
|
media += 'a=sctpmap:' + sctp.attr('number') +
|
||||||
|
' ' + sctp.attr('protocol');
|
||||||
|
|
||||||
|
var streamCount = sctp.attr('streams');
|
||||||
|
if (streamCount)
|
||||||
|
media += ' ' + streamCount + '\r\n';
|
||||||
|
else
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
media += 'c=IN IP4 0.0.0.0\r\n';
|
||||||
|
if (!sctp.length)
|
||||||
|
media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
|
||||||
|
//tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
//console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
|
||||||
|
if (tmp.length) {
|
||||||
|
if (tmp.attr('ufrag')) {
|
||||||
|
media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
|
||||||
|
}
|
||||||
|
if (tmp.attr('pwd')) {
|
||||||
|
media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
|
||||||
|
}
|
||||||
|
tmp.find('>fingerprint').each(function () {
|
||||||
|
// FIXME: check namespace at some point
|
||||||
|
media += 'a=fingerprint:' + this.getAttribute('hash');
|
||||||
|
media += ' ' + $(this).text();
|
||||||
|
media += '\r\n';
|
||||||
|
//console.log("mline "+media);
|
||||||
|
if (this.getAttribute('setup')) {
|
||||||
|
media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
switch (content.attr('senders')) {
|
||||||
|
case 'initiator':
|
||||||
|
media += 'a=sendonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'responder':
|
||||||
|
media += 'a=recvonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'none':
|
||||||
|
media += 'a=inactive\r\n';
|
||||||
|
break;
|
||||||
|
case 'both':
|
||||||
|
media += 'a=sendrecv\r\n';
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
media += 'a=mid:' + content.attr('name') + '\r\n';
|
||||||
|
/*if (content.attr('name') == 'video') {
|
||||||
|
media += 'a=x-google-flag:conference' + '\r\n';
|
||||||
|
}*/
|
||||||
|
|
||||||
|
// <description><rtcp-mux/></description>
|
||||||
|
// see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
|
||||||
|
// and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
|
||||||
|
if (desc.find('rtcp-mux').length) {
|
||||||
|
media += 'a=rtcp-mux\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (desc.find('encryption').length) {
|
||||||
|
desc.find('encryption>crypto').each(function () {
|
||||||
|
media += 'a=crypto:' + this.getAttribute('tag');
|
||||||
|
media += ' ' + this.getAttribute('crypto-suite');
|
||||||
|
media += ' ' + this.getAttribute('key-params');
|
||||||
|
if (this.getAttribute('session-params')) {
|
||||||
|
media += ' ' + this.getAttribute('session-params');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
}
|
||||||
|
desc.find('payload-type').each(function () {
|
||||||
|
media += SDPUtil.build_rtpmap(this) + '\r\n';
|
||||||
|
if ($(this).find('>parameter').length) {
|
||||||
|
media += 'a=fmtp:' + this.getAttribute('id') + ' ';
|
||||||
|
media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
|
||||||
|
});
|
||||||
|
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle(desc, '*');
|
||||||
|
|
||||||
|
// xep-0294
|
||||||
|
tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
|
||||||
|
});
|
||||||
|
|
||||||
|
content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
|
||||||
|
media += SDPUtil.candidateFromJingle(this);
|
||||||
|
});
|
||||||
|
|
||||||
|
// XEP-0339 handle ssrc-group attributes
|
||||||
|
tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
|
||||||
|
var semantics = this.getAttribute('semantics');
|
||||||
|
var ssrcs = $(this).find('>source').map(function() {
|
||||||
|
return this.getAttribute('ssrc');
|
||||||
|
}).get();
|
||||||
|
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
var ssrc = this.getAttribute('ssrc');
|
||||||
|
$(this).find('>parameter').each(function () {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
|
||||||
|
if (this.getAttribute('value') && this.getAttribute('value').length)
|
||||||
|
media += ':' + this.getAttribute('value');
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
if (tmp.length === 0) {
|
||||||
|
// fallback to proprietary mapping of a=ssrc lines
|
||||||
|
tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
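For orientation, a minimal round-trip sketch of the two conversion directions above, assuming the jQuery/Strophe environment that unjingle.js sets up; the SDP blob and the JID are made-up examples, not values from the bridge:

```javascript
// Raw SDP -> Jingle: hand an <iq> builder to toJingle().
var sdp = new SDP('v=0\r\no=- 1923518516 2 IN IP4 0.0.0.0\r\ns=-\r\nt=0 0\r\n' +
                  'm=audio 1 RTP/SAVPF 111\r\na=rtpmap:111 opus/48000/2\r\n');
var iq = $iq({to: 'focus@example.com', type: 'set'})
    .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-initiate'});
sdp.toJingle(iq, 'initiator');

// Jingle -> raw SDP: fromJingle() expects a jQuery-wrapped <jingle> element.
var sdp2 = new SDP('');
sdp2.fromJingle($(iq.tree()).find('>jingle'));
console.log(sdp2.raw);
```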
408
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js
Normal file
@@ -0,0 +1,408 @@
/**
 * Contains utility classes used in SDP class.
 *
 */

/**
 * Class holds a=ssrc lines and media type a=mid
 * @param ssrc synchronization source identifier number (a=ssrc lines from SDP)
 * @param type media type eg. "audio" or "video" (a=mid from SDP)
 * @constructor
 */
function ChannelSsrc(ssrc, type) {
    this.ssrc = ssrc;
    this.type = type;
    this.lines = [];
}

/**
 * Class holds a=ssrc-group: lines
 * @param semantics
 * @param ssrcs
 * @constructor
 */
function ChannelSsrcGroup(semantics, ssrcs, line) {
    this.semantics = semantics;
    this.ssrcs = ssrcs;
}

/**
 * Helper class represents media channel. Is a container for ChannelSsrc, holds channel idx and media type.
 * @param channelNumber channel idx in SDP media array.
 * @param mediaType media type (a=mid)
 * @constructor
 */
function MediaChannel(channelNumber, mediaType) {
    /**
     * SDP channel number
     * @type {*}
     */
    this.chNumber = channelNumber;
    /**
     * Channel media type (a=mid)
     * @type {*}
     */
    this.mediaType = mediaType;
    /**
     * The maps of ssrc numbers to ChannelSsrc objects.
     */
    this.ssrcs = {};

    /**
     * The array of ChannelSsrcGroup objects.
     * @type {Array}
     */
    this.ssrcGroups = [];
}

SDPUtil = {
    iceparams: function (mediadesc, sessiondesc) {
        var data = null;
        if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
            SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
            data = {
                ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
                pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
            };
        }
        return data;
    },
    parse_iceufrag: function (line) {
        return line.substring(12);
    },
    build_iceufrag: function (frag) {
        return 'a=ice-ufrag:' + frag;
    },
    parse_icepwd: function (line) {
        return line.substring(10);
    },
    build_icepwd: function (pwd) {
        return 'a=ice-pwd:' + pwd;
    },
    parse_mid: function (line) {
        return line.substring(6);
    },
    parse_mline: function (line) {
        var parts = line.substring(2).split(' '),
            data = {};
        data.media = parts.shift();
        data.port = parts.shift();
        data.proto = parts.shift();
        if (parts[parts.length - 1] === '') { // trailing whitespace
            parts.pop();
        }
        data.fmt = parts;
        return data;
    },
    build_mline: function (mline) {
        return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
    },
    parse_rtpmap: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.id = parts.shift();
        parts = parts[0].split('/');
        data.name = parts.shift();
        data.clockrate = parts.shift();
        data.channels = parts.length ? parts.shift() : '1';
        return data;
    },
    /**
     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
     * @returns [SCTP port number, protocol, streams]
     */
    parse_sctpmap: function (line) {
        var parts = line.substring(10).split(' ');
        var sctpPort = parts[0];
        var protocol = parts[1];
        // Stream count is optional
        var streamCount = parts.length > 2 ? parts[2] : null;
        return [sctpPort, protocol, streamCount]; // SCTP port
    },
    build_rtpmap: function (el) {
        var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
        if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
            line += '/' + el.getAttribute('channels');
        }
        return line;
    },
    parse_crypto: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.tag = parts.shift();
        data['crypto-suite'] = parts.shift();
        data['key-params'] = parts.shift();
        if (parts.length) {
            data['session-params'] = parts.join(' ');
        }
        return data;
    },
    parse_fingerprint: function (line) { // RFC 4572
        var parts = line.substring(14).split(' '),
            data = {};
        data.hash = parts.shift();
        data.fingerprint = parts.shift();
        // TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
        return data;
    },
    parse_fmtp: function (line) {
        var parts = line.split(' '),
            i, key, value,
            data = [];
        parts.shift();
        parts = parts.join(' ').split(';');
        for (i = 0; i < parts.length; i++) {
            key = parts[i].split('=')[0];
            while (key.length && key[0] == ' ') {
                key = key.substring(1);
            }
            value = parts[i].split('=')[1];
            if (key && value) {
                data.push({name: key, value: value});
            } else if (key) {
                // rfc 4733 (DTMF) style stuff
                data.push({name: '', value: key});
            }
        }
        return data;
    },
    parse_icecandidate: function (line) {
        var candidate = {},
            elems = line.split(' ');
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];
        candidate.generation = 0; // default value, may be overwritten below
        for (var i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
            case 'raddr':
                candidate['rel-addr'] = elems[i + 1];
                break;
            case 'rport':
                candidate['rel-port'] = elems[i + 1];
                break;
            case 'generation':
                candidate.generation = elems[i + 1];
                break;
            case 'tcptype':
                candidate.tcptype = elems[i + 1];
                break;
            default: // TODO
                console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    build_icecandidate: function (cand) {
        var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
        line += ' ';
        switch (cand.type) {
        case 'srflx':
        case 'prflx':
        case 'relay':
            // was cand.hasOwnAttribute(...) in the original, which is not a method of plain objects
            if (cand.hasOwnProperty('rel-addr') && cand.hasOwnProperty('rel-port')) {
                line += 'raddr';
                line += ' ';
                line += cand['rel-addr'];
                line += ' ';
                line += 'rport';
                line += ' ';
                line += cand['rel-port'];
                line += ' ';
            }
            break;
        }
        if (cand.hasOwnProperty('tcptype')) {
            line += 'tcptype';
            line += ' ';
            line += cand.tcptype;
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.hasOwnProperty('generation') ? cand.generation : '0';
        return line;
    },
    parse_ssrc: function (desc) {
        // proprietary mapping of a=ssrc lines
        // TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher on google docs
        // and parse according to that
        var lines = desc.split('\r\n'),
            data = {};
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, 7) == 'a=ssrc:') {
                var idx = lines[i].indexOf(' ');
                data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
            }
        }
        return data;
    },
    parse_rtcpfb: function (line) {
        var parts = line.substr(10).split(' ');
        var data = {};
        data.pt = parts.shift();
        data.type = parts.shift();
        data.params = parts;
        return data;
    },
    parse_extmap: function (line) {
        var parts = line.substr(9).split(' ');
        var data = {};
        data.value = parts.shift();
        if (data.value.indexOf('/') != -1) {
            data.direction = data.value.substr(data.value.indexOf('/') + 1);
            data.value = data.value.substr(0, data.value.indexOf('/'));
        } else {
            data.direction = 'both';
        }
        data.uri = parts.shift();
        data.params = parts;
        return data;
    },
    find_line: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n');
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle) {
                return lines[i];
            }
        }
        if (!sessionpart) {
            return false;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                return lines[j];
            }
        }
        return false;
    },
    find_lines: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n'),
            needles = [];
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle)
                needles.push(lines[i]);
        }
        if (needles.length || !sessionpart) {
            return needles;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                needles.push(lines[j]);
            }
        }
        return needles;
    },
    candidateToJingle: function (line) {
        // a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
        // <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
        if (line.indexOf('candidate:') === 0) {
            line = 'a=' + line;
        } else if (line.substring(0, 12) != 'a=candidate:') {
            console.log('parseCandidate called with a line that is not a candidate line');
            console.log(line);
            return null;
        }
        if (line.substring(line.length - 2) == '\r\n') // chomp it
            line = line.substring(0, line.length - 2);
        var candidate = {},
            elems = line.split(' '),
            i;
        if (elems[6] != 'typ') {
            console.log('did not find typ in the right place');
            console.log(line);
            return null;
        }
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];

        candidate.generation = '0'; // default, may be overwritten below
        for (i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
            case 'raddr':
                candidate['rel-addr'] = elems[i + 1];
                break;
            case 'rport':
                candidate['rel-port'] = elems[i + 1];
                break;
            case 'generation':
                candidate.generation = elems[i + 1];
                break;
            case 'tcptype':
                candidate.tcptype = elems[i + 1];
                break;
            default: // TODO
                console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    candidateFromJingle: function (cand) {
        var line = 'a=candidate:';
        line += cand.getAttribute('foundation');
        line += ' ';
        line += cand.getAttribute('component');
        line += ' ';
        line += cand.getAttribute('protocol'); //.toUpperCase(); // chrome M23 doesn't like this
        line += ' ';
        line += cand.getAttribute('priority');
        line += ' ';
        line += cand.getAttribute('ip');
        line += ' ';
        line += cand.getAttribute('port');
        line += ' ';
        line += 'typ';
        line += ' ' + cand.getAttribute('type');
        line += ' ';
        switch (cand.getAttribute('type')) {
        case 'srflx':
        case 'prflx':
        case 'relay':
            if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
                line += 'raddr';
                line += ' ';
                line += cand.getAttribute('rel-addr');
                line += ' ';
                line += 'rport';
                line += ' ';
                line += cand.getAttribute('rel-port');
                line += ' ';
            }
            break;
        }
        if (cand.getAttribute('protocol').toLowerCase() == 'tcp') {
            line += 'tcptype';
            line += ' ';
            line += cand.getAttribute('tcptype');
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.getAttribute('generation') || '0';
        return line + '\r\n';
    }
};

exports.SDPUtil = SDPUtil;
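Most of these helpers are pure string functions, so they are easy to exercise in isolation. A small sketch, assuming the file is loaded as a Node module (the example SDP lines are invented):

```javascript
var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;

// Parse an m-line into its parts, then rebuild it.
var mline = SDPUtil.parse_mline('m=audio 1 RTP/SAVPF 111 103');
// -> { media: 'audio', port: '1', proto: 'RTP/SAVPF', fmt: ['111', '103'] }
console.log(SDPUtil.build_mline(mline)); // 'm=audio 1 RTP/SAVPF 111 103'

// find_line falls back to the session part when the media part has no match,
// which is what iceparams relies on here.
var session = 'a=ice-ufrag:abcd\r\na=ice-pwd:wxyz\r\n';
console.log(SDPUtil.iceparams('m=audio 1 RTP/SAVPF 111\r\n', session));
// -> { ufrag: 'abcd', pwd: 'wxyz' }
```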
254
contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js
Normal file
@@ -0,0 +1,254 @@
/**
 * Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
 *
 * This can be used with JS designed for browsers to improve reuse of code and
 * allow the use of existing libraries.
 *
 * Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
 *
 * @todo SSL Support
 * @author Dan DeFelippi <dan@driverdan.com>
 * @license MIT
 */

var Url = require("url")
    , sys = require("util");

exports.XMLHttpRequest = function() {
    /**
     * Private variables
     */
    var self = this;
    var http = require('http');
    var https = require('https');

    // Holds http.js objects
    var client;
    var request;
    var response;

    // Request settings
    var settings = {};

    // Set some default headers
    var defaultHeaders = {
        "User-Agent": "node.js",
        "Accept": "*/*"
    };

    var headers = defaultHeaders;

    /**
     * Constants
     */
    this.UNSENT = 0;
    this.OPENED = 1;
    this.HEADERS_RECEIVED = 2;
    this.LOADING = 3;
    this.DONE = 4;

    /**
     * Public vars
     */
    // Current state
    this.readyState = this.UNSENT;

    // default ready state change handler in case one is not set or is set late
    this.onreadystatechange = function() {};

    // Result & response
    this.responseText = "";
    this.responseXML = "";
    this.status = null;
    this.statusText = null;

    /**
     * Open the connection. Currently supports local server requests.
     *
     * @param string method Connection method (eg GET, POST)
     * @param string url URL for the connection.
     * @param boolean async Asynchronous connection. Default is true.
     * @param string user Username for basic authentication (optional)
     * @param string password Password for basic authentication (optional)
     */
    this.open = function(method, url, async, user, password) {
        settings = {
            "method": method,
            "url": url,
            "async": async || null,
            "user": user || null,
            "password": password || null
        };

        this.abort();

        setState(this.OPENED);
    };

    /**
     * Sets a header for the request.
     *
     * @param string header Header name
     * @param string value Header value
     */
    this.setRequestHeader = function(header, value) {
        headers[header] = value;
    };

    /**
     * Gets a header from the server response.
     *
     * @param string header Name of header to get.
     * @return string Text of the header or null if it doesn't exist.
     */
    this.getResponseHeader = function(header) {
        if (this.readyState > this.OPENED && response.headers[header]) {
            return header + ": " + response.headers[header];
        }

        return null;
    };

    /**
     * Gets all the response headers.
     *
     * @return string
     */
    this.getAllResponseHeaders = function() {
        if (this.readyState < this.HEADERS_RECEIVED) {
            throw "INVALID_STATE_ERR: Headers have not been received.";
        }
        var result = "";

        for (var i in response.headers) {
            result += i + ": " + response.headers[i] + "\r\n";
        }
        return result.substr(0, result.length - 2);
    };

    /**
     * Sends the request to the server.
     *
     * @param string data Optional data to send as request body.
     */
    this.send = function(data) {
        if (this.readyState != this.OPENED) {
            throw "INVALID_STATE_ERR: connection must be opened before send() is called";
        }

        var ssl = false;
        var url = Url.parse(settings.url);
        var host; // hoisted: the original declared 'var host' inside two case branches

        // Determine the server
        switch (url.protocol) {
            case 'https:':
                ssl = true;
                // SSL & non-SSL both need host, no break here.
            case 'http:':
                host = url.hostname;
                break;

            case undefined:
            case '':
                host = "localhost";
                break;

            default:
                throw "Protocol not supported.";
        }

        // Default to port 80. If accessing localhost on another port be sure
        // to use http://localhost:port/path
        var port = url.port || (ssl ? 443 : 80);
        // Add query string if one is used
        var uri = url.pathname + (url.search ? url.search : '');

        // Set the Host header or the server may reject the request
        this.setRequestHeader("Host", host);

        // Set content length header
        if (settings.method == "GET" || settings.method == "HEAD") {
            data = null;
        } else if (data) {
            this.setRequestHeader("Content-Length", Buffer.byteLength(data));

            if (!headers["Content-Type"]) {
                this.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
            }
        }

        // Use the proper protocol
        var doRequest = ssl ? https.request : http.request;

        var options = {
            host: host,
            port: port,
            path: uri,
            method: settings.method,
            headers: headers,
            agent: false
        };

        var req = doRequest(options, function(res) {
            response = res;
            response.setEncoding("utf8");

            setState(self.HEADERS_RECEIVED);
            self.status = response.statusCode;

            response.on('data', function(chunk) {
                // Make sure there's some data
                if (chunk) {
                    self.responseText += chunk;
                }
                setState(self.LOADING);
            });

            response.on('end', function() {
                setState(self.DONE);
            });

            response.on('error', function(error) { // 'error' was missing from the parameter list in the original
                self.handleError(error);
            });
        }).on('error', function(error) {
            self.handleError(error);
        });

        req.setHeader("Connection", "Close");

        // Node 0.4 and later won't accept empty data. Make sure it's needed.
        if (data) {
            req.write(data);
        }

        req.end();
    };

    this.handleError = function(error) {
        this.status = 503;
        this.statusText = error;
        this.responseText = error.stack;
        setState(this.DONE);
    };

    /**
     * Aborts a request.
     */
    this.abort = function() {
        headers = defaultHeaders;
        this.readyState = this.UNSENT;
        this.responseText = "";
        this.responseXML = "";
    };

    /**
     * Changes readyState and calls onreadystatechange.
     *
     * @param int state New state
     */
    var setState = function(state) {
        self.readyState = state;
        self.onreadystatechange();
    };
};
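A short usage sketch of the wrapper, following the W3C-style API it emulates; the URL is a placeholder, not a real endpoint:

```javascript
var XMLHttpRequest = require('./XMLHttpRequest.js').XMLHttpRequest;

var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://localhost:8080/status');
xhr.onreadystatechange = function() {
    if (xhr.readyState === xhr.DONE) {
        console.log(xhr.status, xhr.responseText);
    }
};
xhr.send();
```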
83
contrib/jitsimeetbridge/unjingle/strophe/base64.js
Normal file
@@ -0,0 +1,83 @@
// This code was written by Tyler Akins and has been placed in the
// public domain. It would be nice if you left this header intact.
// Base64 code from Tyler Akins -- http://rumkin.com

var Base64 = (function () {
    var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

    var obj = {
        /**
         * Encodes a string in base64
         * @param {String} input The string to encode in base64.
         */
        encode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            do {
                chr1 = input.charCodeAt(i++);
                chr2 = input.charCodeAt(i++);
                chr3 = input.charCodeAt(i++);

                enc1 = chr1 >> 2;
                enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
                enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
                enc4 = chr3 & 63;

                if (isNaN(chr2)) {
                    enc3 = enc4 = 64;
                } else if (isNaN(chr3)) {
                    enc4 = 64;
                }

                output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) +
                    keyStr.charAt(enc3) + keyStr.charAt(enc4);
            } while (i < input.length);

            return output;
        },

        /**
         * Decodes a base64 string.
         * @param {String} input The string to decode.
         */
        decode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            // remove all characters that are not A-Z, a-z, 0-9, +, /, or =
            input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

            do {
                enc1 = keyStr.indexOf(input.charAt(i++));
                enc2 = keyStr.indexOf(input.charAt(i++));
                enc3 = keyStr.indexOf(input.charAt(i++));
                enc4 = keyStr.indexOf(input.charAt(i++));

                chr1 = (enc1 << 2) | (enc2 >> 4);
                chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
                chr3 = ((enc3 & 3) << 6) | enc4;

                output = output + String.fromCharCode(chr1);

                if (enc3 != 64) {
                    output = output + String.fromCharCode(chr2);
                }
                if (enc4 != 64) {
                    output = output + String.fromCharCode(chr3);
                }
            } while (i < input.length);

            return output;
        }
    };

    return obj;
})();

// Nodify
exports.Base64 = Base64;
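A quick round-trip check of the module; the encoded value can be verified against a browser's btoa():

```javascript
var Base64 = require('./base64.js').Base64;

var encoded = Base64.encode('jitsi');  // 'aml0c2k='
console.log(Base64.decode(encoded));   // 'jitsi'
```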
279
contrib/jitsimeetbridge/unjingle/strophe/md5.js
Normal file
@@ -0,0 +1,279 @@
/*
 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
 * Digest Algorithm, as defined in RFC 1321.
 * Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
 * Distributed under the BSD License
 * See http://pajhome.org.uk/crypt/md5 for more info.
 */

var MD5 = (function () {
    /*
     * Configurable variables. You may need to tweak these to be compatible with
     * the server-side, but the defaults work in most cases.
     */
    var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
    var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
    var chrsz = 8;   /* bits per input character. 8 - ASCII; 16 - Unicode */

    /*
     * Add integers, wrapping at 2^32. This uses 16-bit operations internally
     * to work around bugs in some JS interpreters.
     */
    var safe_add = function (x, y) {
        var lsw = (x & 0xFFFF) + (y & 0xFFFF);
        var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
        return (msw << 16) | (lsw & 0xFFFF);
    };

    /*
     * Bitwise rotate a 32-bit number to the left.
     */
    var bit_rol = function (num, cnt) {
        return (num << cnt) | (num >>> (32 - cnt));
    };

    /*
     * Convert a string to an array of little-endian words
     * If chrsz is ASCII, characters >255 have their hi-byte silently ignored.
     */
    var str2binl = function (str) {
        var bin = [];
        var mask = (1 << chrsz) - 1;
        for (var i = 0; i < str.length * chrsz; i += chrsz) {
            bin[i >> 5] |= (str.charCodeAt(i / chrsz) & mask) << (i % 32);
        }
        return bin;
    };

    /*
     * Convert an array of little-endian words to a string
     */
    var binl2str = function (bin) {
        var str = "";
        var mask = (1 << chrsz) - 1;
        for (var i = 0; i < bin.length * 32; i += chrsz) {
            str += String.fromCharCode((bin[i >> 5] >>> (i % 32)) & mask);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a hex string.
     */
    var binl2hex = function (binarray) {
        var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
        var str = "";
        for (var i = 0; i < binarray.length * 4; i++) {
            str += hex_tab.charAt((binarray[i >> 2] >> ((i % 4) * 8 + 4)) & 0xF) +
                hex_tab.charAt((binarray[i >> 2] >> ((i % 4) * 8)) & 0xF);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a base-64 string
     */
    var binl2b64 = function (binarray) {
        var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        var str = "";
        var triplet, j;
        for (var i = 0; i < binarray.length * 4; i += 3) {
            triplet = (((binarray[i >> 2] >> 8 * (i % 4)) & 0xFF) << 16) |
                (((binarray[i + 1 >> 2] >> 8 * ((i + 1) % 4)) & 0xFF) << 8) |
                ((binarray[i + 2 >> 2] >> 8 * ((i + 2) % 4)) & 0xFF);
            for (j = 0; j < 4; j++) {
                if (i * 8 + j * 6 > binarray.length * 32) { str += b64pad; }
                else { str += tab.charAt((triplet >> 6 * (3 - j)) & 0x3F); }
            }
        }
        return str;
    };

    /*
     * These functions implement the four basic operations the algorithm uses.
     */
    var md5_cmn = function (q, a, b, x, s, t) {
        return safe_add(bit_rol(safe_add(safe_add(a, q), safe_add(x, t)), s), b);
    };

    var md5_ff = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
    };

    var md5_gg = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
    };

    var md5_hh = function (a, b, c, d, x, s, t) {
        return md5_cmn(b ^ c ^ d, a, b, x, s, t);
    };

    var md5_ii = function (a, b, c, d, x, s, t) {
        return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
    };

    /*
     * Calculate the MD5 of an array of little-endian words, and a bit length
     */
    var core_md5 = function (x, len) {
        /* append padding */
        x[len >> 5] |= 0x80 << ((len) % 32);
        x[(((len + 64) >>> 9) << 4) + 14] = len;

        var a = 1732584193;
        var b = -271733879;
        var c = -1732584194;
        var d = 271733878;

        var olda, oldb, oldc, oldd;
        for (var i = 0; i < x.length; i += 16) {
            olda = a;
            oldb = b;
            oldc = c;
            oldd = d;

            a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
            d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
            c = md5_ff(c, d, a, b, x[i+ 2], 17,  606105819);
            b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
            a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
            d = md5_ff(d, a, b, c, x[i+ 5], 12,  1200080426);
            c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
            b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
            a = md5_ff(a, b, c, d, x[i+ 8], 7 ,  1770035416);
            d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
            c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
            b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
            a = md5_ff(a, b, c, d, x[i+12], 7 ,  1804603682);
            d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
            c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
            b = md5_ff(b, c, d, a, x[i+15], 22,  1236535329);

            a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
            d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
            c = md5_gg(c, d, a, b, x[i+11], 14,  643717713);
            b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
            a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
            d = md5_gg(d, a, b, c, x[i+10], 9 ,  38016083);
            c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
            b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
            a = md5_gg(a, b, c, d, x[i+ 9], 5 ,  568446438);
            d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
            c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
            b = md5_gg(b, c, d, a, x[i+ 8], 20,  1163531501);
            a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
            d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
            c = md5_gg(c, d, a, b, x[i+ 7], 14,  1735328473);
            b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);

            a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
            d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
            c = md5_hh(c, d, a, b, x[i+11], 16,  1839030562);
            b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
            a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
            d = md5_hh(d, a, b, c, x[i+ 4], 11,  1272893353);
            c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
            b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
            a = md5_hh(a, b, c, d, x[i+13], 4 ,  681279174);
            d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
            c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
            b = md5_hh(b, c, d, a, x[i+ 6], 23,  76029189);
            a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
            d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
            c = md5_hh(c, d, a, b, x[i+15], 16,  530742520);
            b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);

            a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
            d = md5_ii(d, a, b, c, x[i+ 7], 10,  1126891415);
            c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
            b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
            a = md5_ii(a, b, c, d, x[i+12], 6 ,  1700485571);
            d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
            c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
            b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
            a = md5_ii(a, b, c, d, x[i+ 8], 6 ,  1873313359);
            d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
            c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
            b = md5_ii(b, c, d, a, x[i+13], 21,  1309151649);
            a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
            d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
            c = md5_ii(c, d, a, b, x[i+ 2], 15,  718787259);
            b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);

            a = safe_add(a, olda);
            b = safe_add(b, oldb);
            c = safe_add(c, oldc);
            d = safe_add(d, oldd);
        }
        return [a, b, c, d];
    };


    /*
     * Calculate the HMAC-MD5, of a key and some data
     */
    var core_hmac_md5 = function (key, data) {
        var bkey = str2binl(key);
        if (bkey.length > 16) { bkey = core_md5(bkey, key.length * chrsz); }

        var ipad = new Array(16), opad = new Array(16);
        for (var i = 0; i < 16; i++) {
            ipad[i] = bkey[i] ^ 0x36363636;
            opad[i] = bkey[i] ^ 0x5C5C5C5C;
        }

        var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
        return core_md5(opad.concat(hash), 512 + 128);
    };

    var obj = {
        /*
         * These are the functions you'll usually want to call.
         * They take string arguments and return either hex or base-64 encoded
         * strings.
         */
        hexdigest: function (s) {
            return binl2hex(core_md5(str2binl(s), s.length * chrsz));
        },

        b64digest: function (s) {
            return binl2b64(core_md5(str2binl(s), s.length * chrsz));
        },

        hash: function (s) {
            return binl2str(core_md5(str2binl(s), s.length * chrsz));
        },

        hmac_hexdigest: function (key, data) {
            return binl2hex(core_hmac_md5(key, data));
        },

        hmac_b64digest: function (key, data) {
            return binl2b64(core_hmac_md5(key, data));
        },

        hmac_hash: function (key, data) {
            return binl2str(core_hmac_md5(key, data));
        },

        /*
         * Perform a simple self-test to see if the VM is working
         */
        test: function () {
            return MD5.hexdigest("abc") === "900150983cd24fb0d6963f7d28e17f72";
        }
    };

    return obj;
})();

// Nodify
exports.MD5 = MD5;
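The module ships its own self-test, which doubles as a usage example:

```javascript
var MD5 = require('./md5.js').MD5;

console.log(MD5.test());           // true if the VM computes MD5 correctly
console.log(MD5.hexdigest('abc')); // '900150983cd24fb0d6963f7d28e17f72'
console.log(MD5.hmac_hexdigest('key', 'some data'));
```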
3256
contrib/jitsimeetbridge/unjingle/strophe/strophe.js
Normal file
File diff suppressed because it is too large
48
contrib/jitsimeetbridge/unjingle/unjingle.js
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
var strophe = require("./strophe/strophe.js").Strophe;
|
||||||
|
|
||||||
|
var Strophe = strophe.Strophe;
|
||||||
|
var $iq = strophe.$iq;
|
||||||
|
var $msg = strophe.$msg;
|
||||||
|
var $build = strophe.$build;
|
||||||
|
var $pres = strophe.$pres;
|
||||||
|
|
||||||
|
var jsdom = require("jsdom");
|
||||||
|
var window = jsdom.jsdom().parentWindow;
|
||||||
|
var $ = require('jquery')(window);
|
||||||
|
|
||||||
|
var stropheJingle = require("./strophe.jingle.sdp.js");
|
||||||
|
|
||||||
|
|
||||||
|
var input = '';
|
||||||
|
|
||||||
|
process.stdin.on('readable', function() {
|
||||||
|
var chunk = process.stdin.read();
|
||||||
|
if (chunk !== null) {
|
||||||
|
input += chunk;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
process.stdin.on('end', function() {
|
||||||
|
if (process.argv[2] == '--jingle') {
|
||||||
|
var elem = $(input);
|
||||||
|
// app does:
|
||||||
|
// sess.setRemoteDescription($(iq).find('>jingle'), 'offer');
|
||||||
|
//console.log(elem.find('>content'));
|
||||||
|
var sdp = new stropheJingle.SDP('');
|
||||||
|
sdp.fromJingle(elem);
|
||||||
|
console.log(sdp.raw);
|
||||||
|
} else if (process.argv[2] == '--sdp') {
|
||||||
|
var sdp = new stropheJingle.SDP(input);
|
||||||
|
var accept = $iq({to: '%(tojid)s',
|
||||||
|
type: 'set'})
|
||||||
|
.c('jingle', {xmlns: 'urn:xmpp:jingle:1',
|
||||||
|
//action: 'session-accept',
|
||||||
|
action: '%(action)s',
|
||||||
|
initiator: '%(initiator)s',
|
||||||
|
responder: '%(responder)s',
|
||||||
|
sid: '%(sid)s' });
|
||||||
|
sdp.toJingle(accept, 'responder');
|
||||||
|
console.log(Strophe.serialize(accept));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
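The script reads a stanza or SDP blob from standard input and picks the conversion direction from its first argument, so a hedged invocation (with jsdom and jquery assumed to be installed alongside it) looks like:

    node unjingle.js --jingle < stanza.xml   # Jingle XML in, raw SDP out
    node unjingle.js --sdp    < offer.sdp    # SDP in, serialized Jingle IQ template out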
@@ -34,7 +34,7 @@ Add a new job to the main prometheus.yml file:
 ```
 
 An example of a Prometheus configuration with workers can be found in
-[metrics-howto.md](https://matrix-org.github.io/synapse/latest/metrics-howto.html).
+[metrics-howto.md](https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md).
 
 To use `synapse.rules` add
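The hunk above touches the instructions for wiring Synapse into prometheus.yml. After editing that file, one way to catch syntax slips before reloading Prometheus is promtool, which ships with Prometheus itself (the config path here is an assumption):

    promtool check config /etc/prometheus/prometheus.yml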
@@ -92,6 +92,22 @@ new PromConsole.Graph({
 })
 </script>
+
+<h3>Pending calls per tick</h3>
+<div id="reactor_pending_calls"></div>
+<script>
+new PromConsole.Graph({
+  node: document.querySelector("#reactor_pending_calls"),
+  expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
+  name: "[[job]]-[[index]]",
+  min: 0,
+  renderer: "line",
+  height: 150,
+  yAxisFormatter: PromConsole.NumberFormatter.humanize,
+  yHoverFormatter: PromConsole.NumberFormatter.humanize,
+  yTitle: "Pending Calls"
+})
+</script>
 
 <h1>Storage</h1>
 
 <h3>Queries</h3>
@@ -3,9 +3,8 @@ Purge history API examples
 
 # `purge_history.sh`
 
-A bash file, that uses the
-[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
-to purge all messages in a list of rooms up to a certain event. You can select a
+A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
+purge all messages in a list of rooms up to a certain event. You can select a
 timeframe or a number of messages that you want to keep in the room.
 
 Just configure the variables DOMAIN, ADMIN, ROOMS_ARRAY and TIME at the top of
@@ -13,6 +12,5 @@ the script.
 
 # `purge_remote_media.sh`
 
-A bash file, that uses the
-[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
-to purge all old cached remote media.
+A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
+purge all old cached remote media.
@@ -1,7 +1,7 @@
-#!/usr/bin/env bash
+#!/bin/bash
 
 # this script will use the api:
-# https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html
+# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
 #
 # It will purge all messages in a list of rooms up to a cetrain event
 
@@ -84,9 +84,7 @@ AUTH="Authorization: Bearer $TOKEN"
 ###################################################################################################
 # finally start pruning the room:
 ###################################################################################################
-# this will really delete local events, so the messages in the room really
-# disappear unless they are restored by remote federation. This is because
-# we pass {"delete_local_events":true} to the curl invocation below.
+POSTDATA='{"delete_local_events":"true"}' # this will really delete local events, so the messages in the room really disappear unless they are restored by remote federation
 
 for ROOM in "${ROOMS_ARRAY[@]}"; do
 echo "########################################### $(date) ################# "
@@ -106,7 +104,7 @@ for ROOM in "${ROOMS_ARRAY[@]}"; do
 SLEEP=2
 set -x
 # call purge
-OUT=$(curl --header "$AUTH" -s -d '{"delete_local_events":true}' POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
+OUT=$(curl --header "$AUTH" -s -d $POSTDATA POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
 PURGE_ID=$(echo "$OUT" |grep purge_id|cut -d'"' -f4 )
 if [ "$PURGE_ID" == "" ]; then
 # probably the history purge is already in progress for $ROOM
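Stripped of the loop, the purge request the script sends boils down to one curl call. A sketch with hypothetical values for the variables the script asks you to configure (the admin API prefix and event ID are deployment-specific assumptions):

    TOKEN='<admin access token>'
    API_URL='https://yourserver.tld/_matrix/client/r0'
    ROOM='!abcdef:yourserver.tld'
    EVENT_ID='$someeventid'    # hypothetical event ID
    curl --header "Authorization: Bearer $TOKEN" -s \
        -d '{"delete_local_events":true}' \
        -X POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID"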
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/bash
 
 DOMAIN=yourserver.tld
 # add this user as admin in your home server:
88 contrib/scripts/kick_users.py Executable file
@@ -0,0 +1,88 @@
#!/usr/bin/env python

import json
import sys
import urllib
from argparse import ArgumentParser

import requests


def _mkurl(template, kws):
    for key in kws:
        template = template.replace(key, kws[key])
    return template


def main(hs, room_id, access_token, user_id_prefix, why):
    if not why:
        why = "Automated kick."
    print(
        "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
    )
    room_state_url = _mkurl(
        "$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
        {"$HS": hs, "$ROOM": room_id, "$TOKEN": access_token},
    )
    print("Getting room state => %s" % room_state_url)
    res = requests.get(room_state_url)
    print("HTTP %s" % res.status_code)
    state_events = res.json()
    if "error" in state_events:
        print("FATAL")
        print(state_events)
        return

    kick_list = []
    room_name = room_id
    for event in state_events:
        if not event["type"] == "m.room.member":
            if event["type"] == "m.room.name":
                room_name = event["content"].get("name")
            continue
        if not event["content"].get("membership") == "join":
            continue
        if event["state_key"].startswith(user_id_prefix):
            kick_list.append(event["state_key"])

    if len(kick_list) == 0:
        print("No user IDs match the prefix '%s'" % user_id_prefix)
        return

    print("The following user IDs will be kicked from %s" % room_name)
    for uid in kick_list:
        print(uid)
    doit = input("Continue? [Y]es\n")
    if len(doit) > 0 and doit.lower() == "y":
        print("Kicking members...")
        # encode them all
        kick_list = [urllib.quote(uid) for uid in kick_list]
        for uid in kick_list:
            kick_url = _mkurl(
                "$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
                {"$HS": hs, "$UID": uid, "$ROOM": room_id, "$TOKEN": access_token},
            )
            kick_body = {"membership": "leave", "reason": why}
            print("Kicking %s" % uid)
            res = requests.put(kick_url, data=json.dumps(kick_body))
            if res.status_code != 200:
                print("ERROR: HTTP %s" % res.status_code)
            if res.json().get("error"):
                print("ERROR: JSON %s" % res.json())


if __name__ == "__main__":
    parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
    parser.add_argument("-u", "--user-id", help="The user ID prefix e.g. '@irc_'")
    parser.add_argument("-t", "--token", help="Your access_token")
    parser.add_argument("-r", "--room", help="The room ID to kick members in")
    parser.add_argument(
        "-s", "--homeserver", help="The base HS url e.g. http://matrix.org"
    )
    parser.add_argument("-w", "--why", help="Reason for the kick. Optional.")
    args = parser.parse_args()
    if not args.room or not args.token or not args.user_id or not args.homeserver:
        parser.print_help()
        sys.exit(1)
    else:
        main(args.homeserver, args.room, args.token, args.user_id, args.why)
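A hedged invocation, using a hypothetical room and the `@irc_` prefix from the script's own help text:

    python kick_users.py \
        -s http://matrix.org \
        -t "$ACCESS_TOKEN" \
        -r '!someroom:matrix.org' \
        -u '@irc_' \
        -w "Retiring the IRC bridge"

Note that as written the script mixes idioms: `urllib.quote` only exists on Python 2, while under Python 3 the equivalent call lives at `urllib.parse.quote`.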
@@ -1,3 +1,2 @@
 The documentation for using systemd to manage synapse workers is now part of
-the main synapse distribution. See
-[docs/systemd-with-workers](https://matrix-org.github.io/synapse/latest/systemd-with-workers/index.html).
+the main synapse distribution. See [docs/systemd-with-workers](../../docs/systemd-with-workers).
@@ -2,8 +2,7 @@
 This is a setup for managing synapse with a user contributed systemd unit
 file. It provides a `matrix-synapse` systemd unit file that should be tailored
 to accommodate your installation in accordance with the installation
-instructions provided in
-[installation instructions](https://matrix-org.github.io/synapse/latest/setup/installation.html).
+instructions provided in [installation instructions](../../INSTALL.md).
 
 ## Setup
 1. Under the service section, ensure the `User` variable matches which user
@@ -1,71 +0,0 @@
|
|||||||
[Service]
|
|
||||||
# The following directives give the synapse service R/W access to:
|
|
||||||
# - /run/matrix-synapse
|
|
||||||
# - /var/lib/matrix-synapse
|
|
||||||
# - /var/log/matrix-synapse
|
|
||||||
|
|
||||||
RuntimeDirectory=matrix-synapse
|
|
||||||
StateDirectory=matrix-synapse
|
|
||||||
LogsDirectory=matrix-synapse
|
|
||||||
|
|
||||||
######################
|
|
||||||
## Security Sandbox ##
|
|
||||||
######################
|
|
||||||
|
|
||||||
# Make sure that the service has its own unshared tmpfs at /tmp and that it
|
|
||||||
# cannot see or change any real devices
|
|
||||||
PrivateTmp=true
|
|
||||||
PrivateDevices=true
|
|
||||||
|
|
||||||
# We give no capabilities to a service by default
|
|
||||||
CapabilityBoundingSet=
|
|
||||||
AmbientCapabilities=
|
|
||||||
|
|
||||||
# Protect the following from modification:
|
|
||||||
# - The entire filesystem
|
|
||||||
# - sysctl settings and loaded kernel modules
|
|
||||||
# - No modifications allowed to Control Groups
|
|
||||||
# - Hostname
|
|
||||||
# - System Clock
|
|
||||||
ProtectSystem=strict
|
|
||||||
ProtectKernelTunables=true
|
|
||||||
ProtectKernelModules=true
|
|
||||||
ProtectControlGroups=true
|
|
||||||
ProtectClock=true
|
|
||||||
ProtectHostname=true
|
|
||||||
|
|
||||||
# Prevent access to the following:
|
|
||||||
# - /home directory
|
|
||||||
# - Kernel logs
|
|
||||||
ProtectHome=tmpfs
|
|
||||||
ProtectKernelLogs=true
|
|
||||||
|
|
||||||
# Make sure that the process can only see PIDs and process details of itself,
|
|
||||||
# and the second option disables seeing details of things like system load and
|
|
||||||
# I/O etc
|
|
||||||
ProtectProc=invisible
|
|
||||||
ProcSubset=pid
|
|
||||||
|
|
||||||
# While not needed, we set these options explicitly
|
|
||||||
# - This process has been given access to the host network
|
|
||||||
# - It can also communicate with any IP Address
|
|
||||||
PrivateNetwork=false
|
|
||||||
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
|
|
||||||
IPAddressAllow=any
|
|
||||||
|
|
||||||
# Restrict system calls to a sane bunch
|
|
||||||
SystemCallArchitectures=native
|
|
||||||
SystemCallFilter=@system-service
|
|
||||||
SystemCallFilter=~@privileged @resources @obsolete
|
|
||||||
|
|
||||||
# Misc restrictions
|
|
||||||
# - Since the process is a python process it needs to be able to write and
|
|
||||||
# execute memory regions, so we set MemoryDenyWriteExecute to false
|
|
||||||
RestrictSUIDSGID=true
|
|
||||||
RemoveIPC=true
|
|
||||||
NoNewPrivileges=true
|
|
||||||
RestrictRealtime=true
|
|
||||||
RestrictNamespaces=true
|
|
||||||
LockPersonality=true
|
|
||||||
PrivateUsers=true
|
|
||||||
MemoryDenyWriteExecute=false
|
|
||||||
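Since the hunk above drops the hardened drop-in, anyone who wants to keep the sandboxing can reinstate the directives as a local override and then gauge the resulting exposure. A sketch, assuming systemd v240 or newer for the `security` verb:

    sudo systemctl edit matrix-synapse       # paste the [Service] directives above into the drop-in
    sudo systemctl daemon-reload
    systemd-analyze security matrix-synapse  # per-directive exposure report and overall score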
65 debian/build_virtualenv vendored
@@ -15,7 +15,7 @@ export DH_VIRTUALENV_INSTALL_ROOT=/opt/venvs
 # python won't look in the right directory. At least this way, the error will
 # be a *bit* more obvious.
 #
-SNAKE=$(readlink -e /usr/bin/python3)
+SNAKE=`readlink -e /usr/bin/python3`
 
 # try to set the CFLAGS so any compiled C extensions are compiled with the most
 # generic as possible x64 instructions, so that compiling it on a new Intel chip
@@ -24,29 +24,15 @@ SNAKE=$(readlink -e /usr/bin/python3)
 # TODO: add similar things for non-amd64, or figure out a more generic way to
 # do this.
 
-case $(dpkg-architecture -q DEB_HOST_ARCH) in
+case `dpkg-architecture -q DEB_HOST_ARCH` in
 amd64)
 export CFLAGS=-march=x86-64
 ;;
 esac
 
-# Manually install Poetry and export a pip-compatible `requirements.txt`
-# We need a Poetry pre-release as the export command is buggy in < 1.2
-TEMP_VENV="$(mktemp -d)"
-python3 -m venv "$TEMP_VENV"
-source "$TEMP_VENV/bin/activate"
-pip install -U pip
-pip install poetry==1.2.0b1
-poetry export \
-    --extras all \
-    --extras test \
-    --extras systemd \
-    -o exported_requirements.txt
-deactivate
-rm -rf "$TEMP_VENV"
-
-# Use --no-deps to only install pinned versions in exported_requirements.txt,
-# and to avoid https://github.com/pypa/pip/issues/9644
+# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
+# than the 2/3 compatible `virtualenv`.
 dh_virtualenv \
     --install-suffix "matrix-synapse" \
     --builtin-venv \
@@ -54,38 +40,26 @@ dh_virtualenv \
     --upgrade-pip \
     --preinstall="lxml" \
     --preinstall="mock" \
-    --preinstall="wheel" \
-    --extra-pip-arg="--no-deps" \
     --extra-pip-arg="--no-cache-dir" \
     --extra-pip-arg="--compile" \
-    --extras="all,systemd,test" \
-    --requirements="exported_requirements.txt"
+    --extras="all,systemd,test"
 
 PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
 TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
 
-case "$DEB_BUILD_OPTIONS" in
-*nocheck*)
-    # Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
-    ;;
-
-*)
-    # Copy tests to a temporary directory so that we can put them on the
+# we copy the tests to a temporary directory so that we can put them on the
 # PYTHONPATH without putting the uninstalled synapse on the pythonpath.
-tmpdir=$(mktemp -d)
-trap 'rm -r $tmpdir' EXIT
+tmpdir=`mktemp -d`
+trap "rm -r $tmpdir" EXIT
 
 cp -r tests "$tmpdir"
 
 PYTHONPATH="$tmpdir" \
-    "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
+    "${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
-
-    ;;
-esac
 
 # build the config file
-"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
+"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
     --config-dir="/etc/matrix-synapse" \
     --data-dir="/var/lib/matrix-synapse" |
 perl -pe '
@@ -111,24 +85,9 @@ esac
 ' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
 
 # build the log config file
-"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
+"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
     --output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
 
 # add a dependency on the right version of python to substvars.
-PYPKG=$(basename "$SNAKE")
+PYPKG=`basename $SNAKE`
 echo "synapse:pydepends=$PYPKG" >> debian/matrix-synapse-py3.substvars
 
-
-# add a couple of triggers. This is needed so that dh-virtualenv can rebuild
-# the venv when the system python changes (see
-# https://dh-virtualenv.readthedocs.io/en/latest/tutorial.html#step-2-set-up-packaging-for-your-project)
-#
-# we do it here rather than the more conventional way of just adding it to
-# debian/matrix-synapse-py3.triggers, because we need to add a trigger on the
-# right version of python.
-cat >>"debian/.debhelper/generated/matrix-synapse-py3/triggers" <<EOF
-# triggers for dh-virtualenv
-interest-noawait $SNAKE
-interest dh-virtualenv-interpreter-update
-
-EOF
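The deleted Poetry steps can be reproduced outside the package build to inspect what would have been pinned. A sketch using the same pre-release pin as the removed script (the venv path is an assumption):

    python3 -m venv /tmp/poetry-venv && . /tmp/poetry-venv/bin/activate
    pip install -U pip && pip install poetry==1.2.0b1
    poetry export --extras all --extras test --extras systemd -o exported_requirements.txt
    pip install --no-deps -r exported_requirements.txt   # mirrors the --extra-pip-arg="--no-deps" above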
635 debian/changelog vendored
@@ -1,639 +1,8 @@
-matrix-synapse-py3 (1.62.0~rc3) stable; urgency=medium
+matrix-synapse-py3 (1.25.0ubuntu1) UNRELEASED; urgency=medium
 
-  * New Synapse release 1.62.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Jul 2022 16:07:01 +0100
-
-matrix-synapse-py3 (1.62.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.62.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Jul 2022 11:42:41 +0100
-
-matrix-synapse-py3 (1.62.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.62.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Jun 2022 16:34:57 +0100
-
-matrix-synapse-py3 (1.61.1) stable; urgency=medium
-
-  * New Synapse release 1.61.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Jun 2022 14:33:46 +0100
-
-matrix-synapse-py3 (1.61.0) stable; urgency=medium
-
-  * New Synapse release 1.61.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Jun 2022 11:44:19 +0100
-
-matrix-synapse-py3 (1.61.0~rc1) stable; urgency=medium
-
-  * Remove unused `jitsimeetbridge` experiment from `contrib` directory.
-  * New Synapse release 1.61.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Jun 2022 12:42:31 +0100
-
-matrix-synapse-py3 (1.60.0) stable; urgency=medium
-
-  * New Synapse release 1.60.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 31 May 2022 13:41:22 +0100
-
-matrix-synapse-py3 (1.60.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.60.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 27 May 2022 11:04:55 +0100
-
-matrix-synapse-py3 (1.60.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.60.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 May 2022 12:05:01 +0100
-
-matrix-synapse-py3 (1.59.1) stable; urgency=medium
-
-  * New Synapse release 1.59.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 18 May 2022 11:41:46 +0100
-
-matrix-synapse-py3 (1.59.0) stable; urgency=medium
-
-  * New Synapse release 1.59.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 17 May 2022 10:26:50 +0100
-
-matrix-synapse-py3 (1.59.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.59.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 16 May 2022 12:52:15 +0100
-
-matrix-synapse-py3 (1.59.0~rc1) stable; urgency=medium
-
-  * Adjust how the `exported-requirements.txt` file is generated as part of
-    the process of building these packages. This affects the package
-    maintainers only; end-users are unaffected.
-  * New Synapse release 1.59.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 May 2022 10:45:08 +0100
-
-matrix-synapse-py3 (1.58.1) stable; urgency=medium
-
-  * Include python dependencies from the `systemd` and `cache_memory` extras package groups, which
-    were incorrectly omitted from the 1.58.0 package.
-  * New Synapse release 1.58.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 05 May 2022 14:58:23 +0100
-
-matrix-synapse-py3 (1.58.0) stable; urgency=medium
-
-  * New Synapse release 1.58.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100
-
-matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.58.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 17:14:56 +0100
-
-matrix-synapse-py3 (1.58.0~rc1) stable; urgency=medium
-
-  * Use poetry to manage the bundled virtualenv included with this package.
-  * New Synapse release 1.58.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 11:15:20 +0100
-
-matrix-synapse-py3 (1.57.1) stable; urgency=medium
-
-  * New synapse release 1.57.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Apr 2022 15:27:21 +0100
-
-matrix-synapse-py3 (1.57.0) stable; urgency=medium
-
-  * New synapse release 1.57.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Apr 2022 10:58:42 +0100
-
-matrix-synapse-py3 (1.57.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.57.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Apr 2022 13:36:25 +0100
-
-matrix-synapse-py3 (1.56.0) stable; urgency=medium
-
-  * New synapse release 1.56.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Apr 2022 12:38:39 +0100
-
-matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.56.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Mar 2022 10:40:50 +0100
-
-matrix-synapse-py3 (1.55.2) stable; urgency=medium
-
-  * New synapse release 1.55.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 19:07:11 +0000
-
-matrix-synapse-py3 (1.55.1) stable; urgency=medium
-
-  * New synapse release 1.55.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 17:44:23 +0000
-
-matrix-synapse-py3 (1.55.0) stable; urgency=medium
-
-  * New synapse release 1.55.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Mar 2022 13:59:26 +0000
-
-matrix-synapse-py3 (1.55.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.55.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Mar 2022 10:59:31 +0000
-
-matrix-synapse-py3 (1.54.0) stable; urgency=medium
-
-  * New synapse release 1.54.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Mar 2022 10:54:52 +0000
-
-matrix-synapse-py3 (1.54.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.54.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 02 Mar 2022 10:43:22 +0000
-
-matrix-synapse-py3 (1.53.0) stable; urgency=medium
-
-  * New synapse release 1.53.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Feb 2022 11:32:06 +0000
-
-matrix-synapse-py3 (1.53.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.53.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Feb 2022 10:40:50 +0000
-
-matrix-synapse-py3 (1.52.0) stable; urgency=medium
-
-  * New synapse release 1.52.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Feb 2022 11:34:54 +0000
-
-matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.52.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Feb 2022 11:04:09 +0000
-
-matrix-synapse-py3 (1.51.0) stable; urgency=medium
-
-  * New synapse release 1.51.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Jan 2022 11:28:51 +0000
-
-matrix-synapse-py3 (1.51.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.51.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 12:25:00 +0000
-
-matrix-synapse-py3 (1.51.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.51.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 21 Jan 2022 10:46:02 +0000
-
-matrix-synapse-py3 (1.50.2) stable; urgency=medium
-
-  * New synapse release 1.50.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 13:37:11 +0000
-
-matrix-synapse-py3 (1.50.1) stable; urgency=medium
-
-  * New synapse release 1.50.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 16:06:26 +0000
-
-matrix-synapse-py3 (1.50.0) stable; urgency=medium
-
-  * New synapse release 1.50.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 10:40:38 +0000
-
-matrix-synapse-py3 (1.50.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.50.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 14 Jan 2022 11:18:06 +0000
-
-matrix-synapse-py3 (1.50.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.50.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Jan 2022 12:36:17 +0000
-
-matrix-synapse-py3 (1.49.2) stable; urgency=medium
-
-  * New synapse release 1.49.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 17:31:03 +0000
-
-matrix-synapse-py3 (1.49.1) stable; urgency=medium
-
-  * New synapse release 1.49.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 11:07:30 +0000
-
-matrix-synapse-py3 (1.49.0) stable; urgency=medium
-
-  * New synapse release 1.49.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Dec 2021 12:39:46 +0000
-
-matrix-synapse-py3 (1.49.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.49.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Dec 2021 13:52:21 +0000
-
-matrix-synapse-py3 (1.48.0) stable; urgency=medium
-
-  * New synapse release 1.48.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Nov 2021 11:24:15 +0000
-
-matrix-synapse-py3 (1.48.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.48.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Nov 2021 15:56:03 +0000
-
-matrix-synapse-py3 (1.47.1) stable; urgency=medium
-
-  * New synapse release 1.47.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 19 Nov 2021 13:44:32 +0000
-
-matrix-synapse-py3 (1.47.0) stable; urgency=medium
-
-  * New synapse release 1.47.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 17 Nov 2021 13:09:43 +0000
-
-matrix-synapse-py3 (1.47.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.47.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Nov 2021 14:32:47 +0000
-
-matrix-synapse-py3 (1.47.0~rc2) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Update scripts to pass Shellcheck lints.
-  * Remove unused Vagrant scripts from debian/ directory.
-  * Allow building Debian packages for any architecture, not just amd64.
-  * Preinstall the "wheel" package when building virtualenvs.
-  * Do not error if /etc/default/matrix-synapse is missing.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.47.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 10 Nov 2021 09:41:01 +0000
-
-matrix-synapse-py3 (1.46.0) stable; urgency=medium
-
-  [ Richard van der Hoff ]
-  * Compress debs with xz, to fix incompatibility of impish debs with reprepro.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.46.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Nov 2021 13:22:53 +0000
-
-matrix-synapse-py3 (1.46.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.46.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Oct 2021 14:04:04 +0100
-
-matrix-synapse-py3 (1.45.1) stable; urgency=medium
-
-  * New synapse release 1.45.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Oct 2021 11:58:27 +0100
-
-matrix-synapse-py3 (1.45.0) stable; urgency=medium
-
-  * New synapse release 1.45.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Oct 2021 11:18:53 +0100
-
-matrix-synapse-py3 (1.45.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.45.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 14 Oct 2021 10:58:24 +0100
-
-matrix-synapse-py3 (1.45.0~rc1) stable; urgency=medium
-
-  [ Nick @ Beeper ]
-  * Include an `update_synapse_database` script in the distribution.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.45.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Oct 2021 10:46:27 +0100
-
-matrix-synapse-py3 (1.44.0) stable; urgency=medium
-
-  * New synapse release 1.44.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Oct 2021 13:43:57 +0100
-
-matrix-synapse-py3 (1.44.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Oct 2021 14:57:22 +0100
-
-matrix-synapse-py3 (1.44.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 30 Sep 2021 12:39:10 +0100
-
-matrix-synapse-py3 (1.44.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Sep 2021 13:41:28 +0100
-
-matrix-synapse-py3 (1.43.0) stable; urgency=medium
-
-  * New synapse release 1.43.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Sep 2021 11:49:05 +0100
-
-matrix-synapse-py3 (1.43.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.43.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 17 Sep 2021 10:43:21 +0100
-
-matrix-synapse-py3 (1.43.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.43.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Sep 2021 11:39:46 +0100
-
-matrix-synapse-py3 (1.42.0) stable; urgency=medium
-
-  * New synapse release 1.42.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Sep 2021 16:19:09 +0100
-
-matrix-synapse-py3 (1.42.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.42.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 06 Sep 2021 15:25:13 +0100
-
-matrix-synapse-py3 (1.42.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.42.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 01 Sep 2021 11:37:48 +0100
-
-matrix-synapse-py3 (1.41.1) stable; urgency=high
-
-  * New synapse release 1.41.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 31 Aug 2021 12:59:10 +0100
-
-matrix-synapse-py3 (1.41.0) stable; urgency=medium
-
-  * New synapse release 1.41.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Aug 2021 15:31:45 +0100
-
-matrix-synapse-py3 (1.41.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.41.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 18 Aug 2021 15:52:00 +0100
-
-matrix-synapse-py3 (1.40.0) stable; urgency=medium
-
-  * New synapse release 1.40.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Aug 2021 13:50:48 +0100
-
-matrix-synapse-py3 (1.40.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.40.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 09 Aug 2021 13:41:08 +0100
-
-matrix-synapse-py3 (1.40.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.40.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 04 Aug 2021 17:08:55 +0100
-
-matrix-synapse-py3 (1.40.0~rc1) stable; urgency=medium
-
-  [ Richard van der Hoff ]
-  * Drop backwards-compatibility code that was required to support Ubuntu Xenial.
-  * Update package triggers so that the virtualenv is correctly rebuilt
-    when the system python is rebuilt, on recent Python versions.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.40.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Aug 2021 11:31:49 +0100
-
-matrix-synapse-py3 (1.39.0) stable; urgency=medium
-
-  * New synapse release 1.39.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 29 Jul 2021 09:59:00 +0100
-
-matrix-synapse-py3 (1.39.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.39.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 28 Jul 2021 13:30:58 +0100
-
-matrix-synapse-py3 (1.38.1) stable; urgency=medium
-
-  * New synapse release 1.38.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 22 Jul 2021 15:37:06 +0100
-
-matrix-synapse-py3 (1.39.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.39.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Jul 2021 14:28:34 +0100
-
-matrix-synapse-py3 (1.38.0) stable; urgency=medium
-
-  * New synapse release 1.38.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jul 2021 13:20:56 +0100
-
-matrix-synapse-py3 (1.38.0rc3) prerelease; urgency=medium
-
-  [ Erik Johnston ]
-  * Add synapse_review_recent_signups script
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.38.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jul 2021 11:53:56 +0100
-
-matrix-synapse-py3 (1.37.1) stable; urgency=medium
-
-  * New synapse release 1.37.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jun 2021 12:24:06 +0100
-
-matrix-synapse-py3 (1.37.0) stable; urgency=medium
-
-  * New synapse release 1.37.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Jun 2021 10:15:25 +0100
-
-matrix-synapse-py3 (1.36.0) stable; urgency=medium
-
-  * New synapse release 1.36.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jun 2021 15:41:53 +0100
-
-matrix-synapse-py3 (1.35.1) stable; urgency=medium
-
-  * New synapse release 1.35.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 03 Jun 2021 08:11:29 -0400
-
-matrix-synapse-py3 (1.35.0) stable; urgency=medium
-
-  * New synapse release 1.35.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jun 2021 13:23:35 +0100
-
-matrix-synapse-py3 (1.34.0) stable; urgency=medium
-
-  * New synapse release 1.34.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 17 May 2021 11:34:18 +0100
-
-matrix-synapse-py3 (1.33.2) stable; urgency=medium
-
-  * New synapse release 1.33.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 May 2021 11:17:59 +0100
-
-matrix-synapse-py3 (1.33.1) stable; urgency=medium
-
-  * New synapse release 1.33.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 06 May 2021 14:06:33 +0100
-
-matrix-synapse-py3 (1.33.0) stable; urgency=medium
-
-  * New synapse release 1.33.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 May 2021 14:15:27 +0100
-
-matrix-synapse-py3 (1.32.2) stable; urgency=medium
-
-  * New synapse release 1.32.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 22 Apr 2021 12:43:52 +0100
-
-matrix-synapse-py3 (1.32.1) stable; urgency=medium
-
-  * New synapse release 1.32.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 21 Apr 2021 14:00:55 +0100
-
-matrix-synapse-py3 (1.32.0) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.32.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Apr 2021 14:28:39 +0100
-
-matrix-synapse-py3 (1.31.0) stable; urgency=medium
-
-  * New synapse release 1.31.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Apr 2021 13:08:29 +0100
-
-matrix-synapse-py3 (1.30.1) stable; urgency=medium
-
-  * New synapse release 1.30.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 26 Mar 2021 12:01:28 +0000
-
-matrix-synapse-py3 (1.30.0) stable; urgency=medium
-
-  * New synapse release 1.30.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 22 Mar 2021 13:15:34 +0000
-
-matrix-synapse-py3 (1.29.0) stable; urgency=medium
-
-  [ Jonathan de Jong ]
-  * Remove the python -B flag (don't generate bytecode) in scripts and documentation.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.29.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 08 Mar 2021 13:51:50 +0000
-
-matrix-synapse-py3 (1.28.0) stable; urgency=medium
-
-  * New synapse release 1.28.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Feb 2021 10:21:57 +0000
-
-matrix-synapse-py3 (1.27.0) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Fix build on Ubuntu 16.04 LTS (Xenial).
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.27.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Feb 2021 13:11:28 +0000
-
-matrix-synapse-py3 (1.26.0) stable; urgency=medium
-
-  [ Richard van der Hoff ]
   * Remove dependency on `python3-distutils`.
 
-  [ Synapse Packaging team ]
-  * New synapse release 1.26.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 27 Jan 2021 12:43:35 -0500
+ -- Richard van der Hoff <richard@matrix.org>  Fri, 15 Jan 2021 12:44:19 +0000
 
 matrix-synapse-py3 (1.25.0) stable; urgency=medium
1 debian/clean vendored
@@ -1 +0,0 @@
-exported_requirements.txt
2 debian/compat vendored
@@ -1 +1 @@
-10
+9
7 debian/control vendored
@@ -3,8 +3,11 @@ Section: contrib/python
 Priority: extra
 Maintainer: Synapse Packaging team <packages@matrix.org>
 # keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
+# TODO: Remove the dependency on dh-systemd after dropping support for Ubuntu xenial
+# On all other supported releases, it's merely a transitional package which
+# does nothing but depends on debhelper (> 9.20160709)
 Build-Depends:
- debhelper (>= 10),
+ debhelper (>= 9.20160709) | dh-systemd,
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,
@@ -19,7 +22,7 @@ Standards-Version: 3.9.8
 Homepage: https://github.com/matrix-org/synapse
 
 Package: matrix-synapse-py3
-Architecture: any
+Architecture: amd64
 Provides: matrix-synapse
 Conflicts:
 matrix-synapse (<< 0.34.0.1-0matrix2),
23 debian/copyright vendored
@@ -22,6 +22,29 @@ Files: synapse/config/repository.py
 Copyright: 2014-2015, matrix.org
 License: Apache-2.0
 
+Files: contrib/jitsimeetbridge/unjingle/strophe/base64.js
+Copyright: Public Domain (Tyler Akins http://rumkin.com)
+License: public-domain
+ This code was written by Tyler Akins and has been placed in the
+ public domain. It would be nice if you left this header intact.
+ Base64 code from Tyler Akins -- http://rumkin.com
+
+Files: contrib/jitsimeetbridge/unjingle/strophe/md5.js
+Copyright: 1999-2002, Paul Johnston & Contributors
+License: BSD-3-clause
+
+Files: contrib/jitsimeetbridge/unjingle/strophe/strophe.js
+Copyright: 2006-2008, OGG, LLC
+License: Expat
+
+Files: contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js
+Copyright: 2010 passive.ly LLC
+License: Expat
+
+Files: contrib/jitsimeetbridge/unjingle/*.js
+Copyright: 2014 Jitsi
+License: Apache-2.0
+
 Files: debian/*
 Copyright: 2016-2017, Erik Johnston <erik@matrix.org>
 2017, Rahul De <rahulde@swecha.net>
42 debian/hash_password.1 vendored
@@ -1,58 +1,90 @@
-.\" generated with Ronn-NG/v0.8.0
-.\" http://github.com/apjanke/ronn-ng/tree/0.8.0
-.TH "HASH_PASSWORD" "1" "July 2021" "" ""
+.\" generated with Ronn/v0.7.3
+.\" http://github.com/rtomayko/ronn/tree/0.7.3
+.
+.TH "HASH_PASSWORD" "1" "February 2017" "" ""
+.
 .SH "NAME"
 \fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
+.
 .SH "SYNOPSIS"
 \fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
+.
 .SH "DESCRIPTION"
 \fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
+.
 .P
 \fBhash_password\fR takes a password as an parameter either on the command line or the \fBSTDIN\fR if not supplied\.
+.
 .P
 It accepts an YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB10\fR\.
+.
 .P
 The hashed password is written on the \fBSTDOUT\fR\.
+.
 .SH "FILES"
 A sample YAML file accepted by \fBhash_password\fR is described below:
+.
 .P
 bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
+.
 .SH "OPTIONS"
+.
 .TP
 \fB\-p\fR, \fB\-\-password\fR
 Read the password form the command line if [password] is supplied\. If not, prompt the user and read the password form the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
+.
 .TP
 \fB\-c\fR, \fB\-\-config\fR
 Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
+.
 .SH "EXAMPLES"
 Hash from the command line:
+.
 .IP "" 4
+.
 .nf
 
 $ hash_password \-p "p@ssw0rd"
 $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
+.
 .fi
+.
 .IP "" 0
+.
 .P
 Hash from the STDIN:
+.
 .IP "" 4
+.
 .nf
 
 $ hash_password
 Password:
 Confirm password:
 $2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
+.
 .fi
+.
 .IP "" 0
+.
 .P
 Using a config file:
+.
 .IP "" 4
+.
 .nf
 
 $ hash_password \-c config\.yml
 Password:
 Confirm password:
 $2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
+.
 .fi
+.
 .IP "" 0
+.
 .SH "COPYRIGHT"
-This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
+This man page was written by Rahul De <\fIrahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
+.
 .SH "SEE ALSO"
-synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
+synctl(1), synapse_port_db(1), register_new_matrix_user(1)
2 debian/hash_password.ronn vendored
@@ -66,4 +66,4 @@ for Debian GNU/Linux distribution.
 
 ## SEE ALSO
 
-synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
+synctl(1), synapse_port_db(1), register_new_matrix_user(1)
1 debian/manpages vendored
@@ -1,5 +1,4 @@
 debian/hash_password.1
 debian/register_new_matrix_user.1
 debian/synapse_port_db.1
-debian/synapse_review_recent_signups.1
 debian/synctl.1
1 debian/matrix-synapse-py3.config vendored
@@ -2,7 +2,6 @@
 
 set -e
 
-# shellcheck disable=SC1091
 . /usr/share/debconf/confmodule
 
 # try to update the debconf db according to whatever is in the config files
2 debian/matrix-synapse-py3.links vendored
@@ -1,6 +1,4 @@
 opt/venvs/matrix-synapse/bin/hash_password usr/bin/hash_password
 opt/venvs/matrix-synapse/bin/register_new_matrix_user usr/bin/register_new_matrix_user
 opt/venvs/matrix-synapse/bin/synapse_port_db usr/bin/synapse_port_db
-opt/venvs/matrix-synapse/bin/synapse_review_recent_signups usr/bin/synapse_review_recent_signups
 opt/venvs/matrix-synapse/bin/synctl usr/bin/synctl
-opt/venvs/matrix-synapse/bin/update_synapse_database usr/bin/update_synapse_database
1 debian/matrix-synapse-py3.postinst vendored
@@ -1,6 +1,5 @@
 #!/bin/sh -e
 
-# shellcheck disable=SC1091
 . /usr/share/debconf/confmodule
 
 CONFIGFILE_SERVERNAME="/etc/matrix-synapse/conf.d/server_name.yaml"
9 debian/matrix-synapse-py3.triggers vendored Normal file
@@ -0,0 +1,9 @@
# Register interest in Python interpreter changes and
# don't make the Python package dependent on the virtualenv package
# processing (noawait)
interest-noawait /usr/bin/python3.5
interest-noawait /usr/bin/python3.6
interest-noawait /usr/bin/python3.7

# Also provide a symbolic trigger for all dh-virtualenv packages
interest dh-virtualenv-interpreter-update
2 debian/matrix-synapse.service vendored
@@ -5,7 +5,7 @@ Description=Synapse Matrix homeserver
 Type=notify
 User=matrix-synapse
 WorkingDirectory=/var/lib/matrix-synapse
-EnvironmentFile=-/etc/default/matrix-synapse
+EnvironmentFile=/etc/default/matrix-synapse
 ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
 ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
 ExecReload=/bin/kill -HUP $MAINPID
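The only change here is dropping the leading `-` from `EnvironmentFile=`: with the `-`, a missing /etc/default/matrix-synapse is ignored; without it, the unit fails to start. A minimal sketch of that optional file and the reload dance after editing the unit (the variable name is Synapse's cache-tuning knob; the value is illustrative):

    # /etc/default/matrix-synapse
    SYNAPSE_CACHE_FACTOR=1.0

    sudo systemctl daemon-reload
    sudo systemctl restart matrix-synapse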
Some files were not shown because too many files have changed in this diff