mirror of
https://github.com/element-hq/synapse.git
synced 2025-12-07 01:20:16 +00:00
Compare commits
1 Commits
v1.75.0
...
initial_sy
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a8dbb624b3 |
@@ -1,91 +0,0 @@
|
|||||||
{{- /*gotype: github.com/haveyoudebuggedit/gotestfmt/parser.Package*/ -}}
|
|
||||||
{{- /*
|
|
||||||
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
|
|
||||||
we are creating a stylized header for each package.
|
|
||||||
|
|
||||||
This template is based on https://github.com/haveyoudebuggedit/gotestfmt/blob/f179b0e462a9dcf7101515d87eec4e4d7e58b92a/.gotestfmt/github/package.gotpl
|
|
||||||
which is under the Unlicense licence.
|
|
||||||
*/ -}}
|
|
||||||
{{- $settings := .Settings -}}
|
|
||||||
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
|
|
||||||
{{- if eq .Result "PASS" -}}
|
|
||||||
{{ "\033" }}[0;32m
|
|
||||||
{{- else if eq .Result "SKIP" -}}
|
|
||||||
{{ "\033" }}[0;33m
|
|
||||||
{{- else -}}
|
|
||||||
{{ "\033" }}[0;31m
|
|
||||||
{{- end -}}
|
|
||||||
📦 {{ .Name }}{{- "\033" }}[0m
|
|
||||||
{{- with .Coverage -}}
|
|
||||||
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
|
|
||||||
{{- end -}}
|
|
||||||
{{- "\n" -}}
|
|
||||||
{{- with .Reason -}}
|
|
||||||
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- with .Output -}}
|
|
||||||
{{- . -}}{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- with .TestCases -}}
|
|
||||||
{{- /* Passing tests are first */ -}}
|
|
||||||
{{- range . -}}
|
|
||||||
{{- if eq .Result "PASS" -}}
|
|
||||||
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
|
|
||||||
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
|
|
||||||
{{- with .Coverage -}}
|
|
||||||
, coverage: {{ . }}%
|
|
||||||
{{- end -}})
|
|
||||||
{{- "\033" -}}[0m
|
|
||||||
{{- "\n" -}}
|
|
||||||
|
|
||||||
{{- with .Output -}}
|
|
||||||
{{- formatTestOutput . $settings -}}
|
|
||||||
{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
|
|
||||||
::endgroup::{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- end -}}
|
|
||||||
|
|
||||||
{{- /* Then skipped tests are second */ -}}
|
|
||||||
{{- range . -}}
|
|
||||||
{{- if eq .Result "SKIP" -}}
|
|
||||||
::group::{{ "\033" }}[0;33m🚧{{ " " }}{{- .Name -}}
|
|
||||||
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
|
|
||||||
{{- with .Coverage -}}
|
|
||||||
, coverage: {{ . }}%
|
|
||||||
{{- end -}})
|
|
||||||
{{- "\033" -}}[0m
|
|
||||||
{{- "\n" -}}
|
|
||||||
|
|
||||||
{{- with .Output -}}
|
|
||||||
{{- formatTestOutput . $settings -}}
|
|
||||||
{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
|
|
||||||
::endgroup::{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- end -}}
|
|
||||||
|
|
||||||
{{- /* and failing tests are last */ -}}
|
|
||||||
{{- range . -}}
|
|
||||||
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
|
|
||||||
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
|
|
||||||
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
|
|
||||||
{{- with .Coverage -}}
|
|
||||||
, coverage: {{ . }}%
|
|
||||||
{{- end -}})
|
|
||||||
{{- "\033" -}}[0m
|
|
||||||
{{- "\n" -}}
|
|
||||||
|
|
||||||
{{- with .Output -}}
|
|
||||||
{{- formatTestOutput . $settings -}}
|
|
||||||
{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
|
|
||||||
::endgroup::{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- end -}}
|
|
||||||
{{- "\n" -}}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
---
|
|
||||||
title: CI run against latest deps is failing
|
|
||||||
---
|
|
||||||
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the script to connect to the postgresql database that will be available in the
|
|
||||||
# CI's Docker setup at the point where this file is considered.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: ".ci/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "psycopg2"
|
|
||||||
args:
|
|
||||||
user: postgres
|
|
||||||
host: localhost
|
|
||||||
password: postgres
|
|
||||||
database: synapse
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers: []
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
|
|
||||||
# compatible wheel, if so rename the wheel before repairing it.
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
from typing import Optional
|
|
||||||
from zipfile import ZipFile
|
|
||||||
|
|
||||||
from packaging.tags import Tag
|
|
||||||
from packaging.utils import parse_wheel_filename
|
|
||||||
from packaging.version import Version
|
|
||||||
|
|
||||||
|
|
||||||
def check_is_abi3_compatible(wheel_file: str) -> None:
    """Raise if the wheel at *wheel_file* ships a non-abi3 shared library.

    Every ``.so`` inside the wheel must carry the ``.abi3.so`` suffix;
    anything else means the extension is tied to a specific CPython ABI
    and the wheel must not be relabelled as abi3.

    Raises:
        Exception: naming the first offending library found.
    """
    with ZipFile(wheel_file, "r") as wheel:
        shared_libs = [entry for entry in wheel.namelist() if entry.endswith(".so")]

    for lib in shared_libs:
        if not lib.endswith(".abi3.so"):
            raise Exception(f"Found non-abi3 lib: {lib}")
|
|
||||||
|
|
||||||
|
|
||||||
def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    """Relabel a cpython-tagged wheel as an abi3-compatible wheel.

    If the wheel is already tagged ``abi3`` it is returned untouched.
    Otherwise the wheel's contents are checked for abi3 compatibility and
    the file is renamed in place to carry the abi3 tag.

    Args:
        wheel_file: path to the wheel to (possibly) rename.
        name: distribution name taken from the original filename.
        version: the wheel's version.
        tag: the single (interpreter, abi, platform) tag of the wheel.

    Returns:
        The path to the wheel to use from now on (renamed or original).
    """
    if tag.abi == "abi3":
        # Already labelled abi3 — nothing to do.
        return wheel_file

    # Refuse to relabel a wheel that ships ABI-specific shared libraries.
    check_is_abi3_compatible(wheel_file)

    retagged = Tag(tag.interpreter, "abi3", tag.platform)
    new_wheel_file = os.path.join(
        os.path.dirname(wheel_file), f"{name}-{version}-{retagged}.whl"
    )

    os.rename(wheel_file, new_wheel_file)
    print("Renamed wheel to", new_wheel_file)

    return new_wheel_file
|
|
||||||
|
|
||||||
|
|
||||||
def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
    """Entry point: retag a cpython wheel as abi3 (if possible), then repair it.

    Args:
        wheel_file: path to the wheel produced by the build.
        dest_dir: directory the repaired wheel is written to.
        archs: architectures to require (macOS / delocate only), or ``None``
            to repair with auditwheel on Linux.
    """
    basename = os.path.basename(wheel_file)

    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
    # normalizes the package name (i.e. it converts matrix_synapse ->
    # matrix-synapse), which is not what we want, so the distribution name is
    # taken directly from the raw filename instead.
    _, version, build, tags = parse_wheel_filename(basename)
    name = basename.split("-")[0]

    if len(tags) != 1:
        # We expect a wheel file with only a single tag.
        raise Exception(f"Unexpectedly found multiple tags: {tags}")

    if build:
        # We don't use build tags in Synapse.
        raise Exception(f"Unexpected build tag: {build}")

    tag = next(iter(tags))

    # If the wheel is for cpython then convert it into an abi3 wheel.
    if tag.interpreter.startswith("cp"):
        wheel_file = cpython(wheel_file, name, version, tag)

    # Finally, repair the wheel.
    if archs is None:
        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)
    else:
        # Being given archs means we are on macOS and must use delocate
        # (`delocate-listdeps` then `delocate-wheel`) instead of auditwheel.
        subprocess.run(["delocate-listdeps", wheel_file], check=True)
        subprocess.run(
            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
            check=True,
        )
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Command line: wrap `auditwheel repair` / `delocate-wheel`, optionally
    # retagging the wheel as abi3 first.
    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
    parser.add_argument(
        "--wheel-dir",
        "-w",
        metavar="WHEEL_DIR",
        help="Directory to store delocated wheels",
        required=True,
    )
    # macOS only: forwarded to `delocate-wheel --require-archs`.
    parser.add_argument(
        "--require-archs",
        metavar="archs",
        default=None,
    )
    parser.add_argument(
        "wheel_file",
        metavar="WHEEL_FILE",
    )

    args = parser.parse_args()
    main(args.wheel_file, args.wheel_dir, args.require_archs)
|
|
||||||
@@ -1,135 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
# Calculate the trial jobs to run based on if we're in a PR or not.
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
def set_output(key: str, value: str):
    """Append a ``key=value`` line to the GitHub Actions output file.

    See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
    """
    with open(os.environ["GITHUB_OUTPUT"], "at") as output_file:
        output_file.write(f"{key}={value}\n")
|
|
||||||
|
|
||||||
|
|
||||||
# True when this workflow run was triggered by a pull request; used below to
# trim the job matrices down for PRs.
IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

# First calculate the various trial jobs.
#
# For each type of test we only run on Py3.7 on PRs

trial_sqlite_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "all",
    }
]

if not IS_PR:
    # Non-PR runs (e.g. merges to develop) also test the newer Python versions.
    trial_sqlite_tests.extend(
        {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
        for version in ("3.8", "3.9", "3.10", "3.11")
    )


# Postgres is only tested at the oldest supported combination on PRs …
trial_postgres_tests = [
    {
        "python-version": "3.7",
        "database": "postgres",
        "postgres-version": "11",
        "extras": "all",
    }
]

if not IS_PR:
    # … and additionally at the newest combination on non-PR runs.
    trial_postgres_tests.append(
        {
            "python-version": "3.11",
            "database": "postgres",
            "postgres-version": "15",
            "extras": "all",
        }
    )

# One job with no optional extras installed, to catch missing-dependency bugs.
trial_no_extra_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "",
    }
]

# Log the computed matrix in a collapsible GitHub Actions group for debugging.
print("::group::Calculated trial jobs")
print(
    json.dumps(
        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
    )
)
print("::endgroup::")

# Expose the trial matrix to later workflow steps as a JSON string.
test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
set_output("trial_test_matrix", test_matrix)


# Next, calculate the various sytest jobs.
#
# For each type of test we only run on focal on PRs


sytest_tests = [
    {
        "sytest-tag": "focal",
    },
    {
        "sytest-tag": "focal",
        "postgres": "postgres",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
    },
]

if not IS_PR:
    # Non-PR runs also exercise other sytest image tags.
    sytest_tests.extend(
        [
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
            },
            {
                "sytest-tag": "buster",
                "postgres": "multi-postgres",
                "workers": "workers",
            },
        ]
    )


print("::group::Calculated sytest jobs")
print(json.dumps(sytest_tests, indent=4))
print("::endgroup::")

# Expose the sytest matrix to later workflow steps as a JSON string.
test_matrix = json.dumps(sytest_tests)
set_output("sytest_test_matrix", test_matrix)
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# Fetches a version of complement which best matches the current build.
|
|
||||||
#
|
|
||||||
# The tarball is unpacked into `./complement`.
|
|
||||||
|
|
||||||
set -e
|
|
||||||
mkdir -p complement
|
|
||||||
|
|
||||||
# Pick an appropriate version of complement. Depending on whether this is a PR or release,
|
|
||||||
# etc. we need to use different fallbacks:
|
|
||||||
#
|
|
||||||
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
|
|
||||||
# for pull requests, otherwise GITHUB_REF).
|
|
||||||
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
|
||||||
# (GITHUB_BASE_REF for pull requests).
|
|
||||||
# 3. Use the default complement branch ("HEAD").
|
|
||||||
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
|
|
||||||
# Skip empty branch names and merge commits.
|
|
||||||
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
|
||||||
done
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
#!/bin/bash
#
# wraps `gotestfmt`, hiding output from successful packages unless
# all tests passed.
#
# Expects the `go test -json` output on stdin.

set -o pipefail
set -e

# tee the test results to a log, whilst also piping them into gotestfmt,
# telling it to hide successful results, so that we can clearly see
# unsuccessful results.
tee complement.log | gotestfmt -hide successful-packages

# gotestfmt will exit non-zero if there were any failures, so if we got to this
# point, we must have had a successful result.
echo "All tests successful; showing all test results"

# Pipe the test results back through gotestfmt, showing all results.
# The log file consists of JSON lines giving the test results, interspersed
# with regular stdout lines (including reports of downloaded packages).
grep '^{"Time":' complement.log | gotestfmt
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
# this script is run by GitHub Actions in a plain `focal` container; it
|
|
||||||
# - installs the minimal system requirements, and poetry;
|
|
||||||
# - patches the project definition file to refer to old versions only;
|
|
||||||
# - creates a venv with these old versions using poetry; and finally
|
|
||||||
# - invokes `trial` to run the tests with old deps.
|
|
||||||
|
|
||||||
set -ex
|
|
||||||
|
|
||||||
# Prevent virtualenv from auto-updating pip to an incompatible version
|
|
||||||
export VIRTUALENV_NO_DOWNLOAD=1
|
|
||||||
|
|
||||||
# TODO: in the future, we could use an implementation of
|
|
||||||
# https://github.com/python-poetry/poetry/issues/3527
|
|
||||||
# https://github.com/pypa/pip/issues/8085
|
|
||||||
# to select the lowest possible versions, rather than resorting to this sed script.
|
|
||||||
|
|
||||||
# Patch the project definitions in-place:
|
|
||||||
# - Replace all lower and tilde bounds with exact bounds
|
|
||||||
# - Replace all caret bounds---but not the one that defines the supported Python version!
|
|
||||||
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
|
|
||||||
# - Use pyopenssl 17.0, which is the oldest version that works with
|
|
||||||
# a `cryptography` compiled against OpenSSL 1.1.
|
|
||||||
# - Omit systemd: we're not logging to journal here.
|
|
||||||
|
|
||||||
sed -i \
|
|
||||||
-e "s/[~>]=/==/g" \
|
|
||||||
-e '/^python = "^/!s/\^/==/g' \
|
|
||||||
-e "/psycopg2/d" \
|
|
||||||
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
|
|
||||||
-e '/systemd/d' \
|
|
||||||
pyproject.toml
|
|
||||||
|
|
||||||
# Use poetry to do the installation. This ensures that the versions are all mutually
|
|
||||||
# compatible (as far the package metadata declares, anyway); pip's package resolver
|
|
||||||
# is more lax.
|
|
||||||
#
|
|
||||||
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
|
|
||||||
# toml file. This means we don't have to ensure compatibility between old deps and
|
|
||||||
# dev tools.
|
|
||||||
|
|
||||||
pip install toml wheel
|
|
||||||
|
|
||||||
REMOVE_DEV_DEPENDENCIES="
|
|
||||||
import toml
|
|
||||||
with open('pyproject.toml', 'r') as f:
|
|
||||||
data = toml.loads(f.read())
|
|
||||||
|
|
||||||
del data['tool']['poetry']['dev-dependencies']
|
|
||||||
|
|
||||||
with open('pyproject.toml', 'w') as f:
|
|
||||||
toml.dump(data, f)
|
|
||||||
"
|
|
||||||
python3 -c "$REMOVE_DEV_DEPENDENCIES"
|
|
||||||
|
|
||||||
pip install poetry==1.2.0
|
|
||||||
poetry lock
|
|
||||||
|
|
||||||
echo "::group::Patched pyproject.toml"
|
|
||||||
cat pyproject.toml
|
|
||||||
echo "::endgroup::"
|
|
||||||
echo "::group::Lockfile after patch"
|
|
||||||
cat poetry.lock
|
|
||||||
echo "::endgroup::"
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
#
|
|
||||||
# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
|
|
||||||
#
|
|
||||||
# Must be called after Synapse has been checked out to `synapse/`.
|
|
||||||
#
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
|
|
||||||
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
|
|
||||||
|
|
||||||
block Set Go Version
|
|
||||||
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
|
|
||||||
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
|
|
||||||
|
|
||||||
# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
|
|
||||||
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
|
|
||||||
# Add the Go path to the PATH: We need this so we can call gotestfmt
|
|
||||||
echo "~/go/bin" >> $GITHUB_PATH
|
|
||||||
endblock
|
|
||||||
|
|
||||||
block Install Complement Dependencies
|
|
||||||
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
|
|
||||||
go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
|
||||||
endblock
|
|
||||||
|
|
||||||
block Install custom gotestfmt template
|
|
||||||
mkdir .gotestfmt/github -p
|
|
||||||
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
|
|
||||||
endblock
|
|
||||||
|
|
||||||
block Check out Complement
|
|
||||||
# Attempt to check out the same branch of Complement as the PR. If it
|
|
||||||
# doesn't exist, fallback to HEAD.
|
|
||||||
synapse/.ci/scripts/checkout_complement.sh
|
|
||||||
endblock
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# Test for the export-data admin command against sqlite and postgres
|
|
||||||
|
|
||||||
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
|
|
||||||
# Expects `poetry` to be available on the `PATH`.
|
|
||||||
|
|
||||||
set -xe
|
|
||||||
cd "$(dirname "$0")/../.."
|
|
||||||
|
|
||||||
echo "--- Generate the signing key"
|
|
||||||
|
|
||||||
# Generate the server's signing key.
|
|
||||||
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
|
|
||||||
|
|
||||||
echo "--- Prepare test database"
|
|
||||||
|
|
||||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
|
||||||
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
|
||||||
|
|
||||||
# Run the export-data command on the sqlite test database
|
|
||||||
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
|
|
||||||
--output-directory /tmp/export_data
|
|
||||||
|
|
||||||
# Test that the output directory exists and contains the rooms directory
|
|
||||||
dir="/tmp/export_data/rooms"
|
|
||||||
if [ -d "$dir" ]; then
|
|
||||||
echo "Command successful, this test passes"
|
|
||||||
else
|
|
||||||
echo "No output directories found, the command fails against a sqlite database."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create the PostgreSQL database.
|
|
||||||
psql -c "CREATE DATABASE synapse"
|
|
||||||
|
|
||||||
# Port the SQLite databse to postgres so we can check command works against postgres
|
|
||||||
echo "+++ Port SQLite3 databse to postgres"
|
|
||||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
|
||||||
|
|
||||||
# Run the export-data command on postgres database
|
|
||||||
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
|
|
||||||
--output-directory /tmp/export_data2
|
|
||||||
|
|
||||||
# Test that the output directory exists and contains the rooms directory
|
|
||||||
dir2="/tmp/export_data2/rooms"
|
|
||||||
if [ -d "$dir2" ]; then
|
|
||||||
echo "Command successful, this test passes"
|
|
||||||
else
|
|
||||||
echo "No output directories found, the command fails against a postgres database."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
#
|
|
||||||
# Test script for 'synapse_port_db'.
|
|
||||||
# - configures synapse and a postgres server.
|
|
||||||
# - runs the port script on a prepopulated test sqlite db. Checks that the
|
|
||||||
# return code is zero.
|
|
||||||
# - reruns the port script on the same sqlite db, targetting the same postgres db.
|
|
||||||
# Checks that the return code is zero.
|
|
||||||
# - runs the port script against a new sqlite db. Checks the return code is zero.
|
|
||||||
#
|
|
||||||
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
|
|
||||||
# Expects `poetry` to be available on the `PATH`.
|
|
||||||
|
|
||||||
set -xe -o pipefail
|
|
||||||
cd "$(dirname "$0")/../.."
|
|
||||||
|
|
||||||
echo "--- Generate the signing key"
|
|
||||||
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
|
|
||||||
|
|
||||||
echo "--- Prepare test database"
|
|
||||||
# Make sure the SQLite3 database is using the latest schema and has no pending background updates.
|
|
||||||
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
|
||||||
|
|
||||||
# Create the PostgreSQL database.
|
|
||||||
psql -c "CREATE DATABASE synapse"
|
|
||||||
|
|
||||||
echo "+++ Run synapse_port_db against test database"
|
|
||||||
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
|
|
||||||
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
|
|
||||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
|
||||||
|
|
||||||
# We should be able to run twice against the same database.
|
|
||||||
echo "+++ Run synapse_port_db a second time"
|
|
||||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
|
||||||
|
|
||||||
#####
|
|
||||||
|
|
||||||
# Now do the same again, on an empty database.
|
|
||||||
|
|
||||||
echo "--- Prepare empty SQLite database"
|
|
||||||
|
|
||||||
# we do this by deleting the sqlite db, and then doing the same again.
|
|
||||||
rm .ci/test_db.db
|
|
||||||
|
|
||||||
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
|
||||||
|
|
||||||
# re-create the PostgreSQL database.
|
|
||||||
psql \
|
|
||||||
-c "DROP DATABASE synapse" \
|
|
||||||
-c "CREATE DATABASE synapse"
|
|
||||||
|
|
||||||
echo "+++ Run synapse_port_db against empty database"
|
|
||||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
|
||||||
|
|
||||||
echo "--- Create a brand new postgres database from schema"
|
|
||||||
cp .ci/postgres-config.yaml .ci/postgres-config-unported.yaml
|
|
||||||
sed -i -e 's/database: synapse/database: synapse_unported/' .ci/postgres-config-unported.yaml
|
|
||||||
psql -c "CREATE DATABASE synapse_unported"
|
|
||||||
poetry run update_synapse_database --database-config .ci/postgres-config-unported.yaml --run-background-updates
|
|
||||||
|
|
||||||
echo "+++ Comparing ported schema with unported schema"
|
|
||||||
# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
|
|
||||||
psql synapse -c "DROP TABLE port_from_sqlite3;"
|
|
||||||
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
|
|
||||||
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
|
|
||||||
# By default, `diff` returns zero if there are no changes and nonzero otherwise
|
|
||||||
diff -u unported.sql ported.sql | tee schema_diff
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
|
|
||||||
# schema and run background updates on it.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: ".ci/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "sqlite3"
|
|
||||||
args:
|
|
||||||
database: ".ci/test_db.db"
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers: []
|
|
||||||
BIN
.ci/test_db.db
BIN
.ci/test_db.db
Binary file not shown.
@@ -1,4 +0,0 @@
|
|||||||
---
|
|
||||||
title: CI run against Twisted trunk is failing
|
|
||||||
---
|
|
||||||
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
# This file serves as a blacklist for SyTest tests that we expect will fail in
|
|
||||||
# Synapse when run under worker mode. For more details, see sytest-blacklist.
|
|
||||||
14
.codecov.yml
14
.codecov.yml
@@ -1,14 +0,0 @@
|
|||||||
comment: off
|
|
||||||
|
|
||||||
coverage:
|
|
||||||
status:
|
|
||||||
project:
|
|
||||||
default:
|
|
||||||
target: 0 # Target % coverage, can be auto. Turned off for now
|
|
||||||
threshold: null
|
|
||||||
base: auto
|
|
||||||
patch:
|
|
||||||
default:
|
|
||||||
target: 0
|
|
||||||
threshold: null
|
|
||||||
base: auto
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
[run]
|
|
||||||
branch = True
|
|
||||||
parallel = True
|
|
||||||
include=$TOP/synapse/*
|
|
||||||
data_file = $TOP/.coverage
|
|
||||||
|
|
||||||
[report]
|
|
||||||
precision = 2
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# ignore everything by default
|
|
||||||
*
|
|
||||||
|
|
||||||
# things to include
|
|
||||||
!docker
|
|
||||||
!synapse
|
|
||||||
!rust
|
|
||||||
!README.rst
|
|
||||||
!pyproject.toml
|
|
||||||
!poetry.lock
|
|
||||||
!Cargo.lock
|
|
||||||
!Cargo.toml
|
|
||||||
!build_rust.py
|
|
||||||
|
|
||||||
rust/target
|
|
||||||
synapse/*.so
|
|
||||||
|
|
||||||
**/__pycache__
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
# EditorConfig https://EditorConfig.org
|
|
||||||
|
|
||||||
# top-most EditorConfig file
|
|
||||||
root = true
|
|
||||||
|
|
||||||
# 4 space indentation
|
|
||||||
[*.{py,pyi}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 4
|
|
||||||
max_line_length = 88
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
# Commits in this file will be removed from GitHub blame results.
|
|
||||||
#
|
|
||||||
# To use this file locally, use:
|
|
||||||
# git blame --ignore-revs-file="path/to/.git-blame-ignore-revs" <files>
|
|
||||||
#
|
|
||||||
# or configure the `blame.ignoreRevsFile` option in your git config.
|
|
||||||
#
|
|
||||||
# If ignoring a pull request that was not squash merged, only the merge
|
|
||||||
# commit needs to be put here. Child commits will be resolved from it.
|
|
||||||
|
|
||||||
# Run black (#3679).
|
|
||||||
8b3d9b6b199abb87246f982d5db356f1966db925
|
|
||||||
|
|
||||||
# Black reformatting (#5482).
|
|
||||||
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
|
|
||||||
|
|
||||||
# Target Python 3.5 with black (#8664).
|
|
||||||
aff1eb7c671b0a3813407321d2702ec46c71fa56
|
|
||||||
|
|
||||||
# Update black to 20.8b1 (#9381).
|
|
||||||
0a00b7ff14890987f09112a2ae696c61001e6cf1
|
|
||||||
|
|
||||||
# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
|
|
||||||
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
|
|
||||||
2
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@@ -1,2 +0,0 @@
|
|||||||
# Automatically request reviews from the synapse-core team when a pull request comes in.
|
|
||||||
* @matrix-org/synapse-core
|
|
||||||
4
.github/FUNDING.yml
vendored
4
.github/FUNDING.yml
vendored
@@ -1,4 +0,0 @@
|
|||||||
# One username per supported platform and one custom link
|
|
||||||
patreon: matrixdotorg
|
|
||||||
liberapay: matrixdotorg
|
|
||||||
custom: https://paypal.me/matrixdotorg
|
|
||||||
5
.github/ISSUE_TEMPLATE.md
vendored
5
.github/ISSUE_TEMPLATE.md
vendored
@@ -1,5 +0,0 @@
|
|||||||
**If you are looking for support** please ask in **#synapse:matrix.org**
|
|
||||||
(using a matrix.org account if necessary). We do not use GitHub issues for
|
|
||||||
support.
|
|
||||||
|
|
||||||
**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
|
|
||||||
144
.github/ISSUE_TEMPLATE/BUG_REPORT.yml
vendored
144
.github/ISSUE_TEMPLATE/BUG_REPORT.yml
vendored
@@ -1,144 +0,0 @@
|
|||||||
name: Bug report
|
|
||||||
description: Create a report to help us improve
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
|
||||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
|
|
||||||
|
|
||||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
|
||||||
|
|
||||||
This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.
|
|
||||||
|
|
||||||
You can also preview your report before submitting it.
|
|
||||||
- type: textarea
|
|
||||||
id: description
|
|
||||||
attributes:
|
|
||||||
label: Description
|
|
||||||
description: Describe the problem that you are experiencing
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: reproduction_steps
|
|
||||||
attributes:
|
|
||||||
label: Steps to reproduce
|
|
||||||
description: |
|
|
||||||
Describe the series of steps that leads you to the problem.
|
|
||||||
|
|
||||||
Describe how what happens differs from what you expected.
|
|
||||||
placeholder: Tell us what you see!
|
|
||||||
value: |
|
|
||||||
- list the steps
|
|
||||||
- that reproduce the bug
|
|
||||||
- using hyphens as bullet points
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
---
|
|
||||||
|
|
||||||
**IMPORTANT**: please answer the following questions, to help us narrow down the problem.
|
|
||||||
- type: input
|
|
||||||
id: homeserver
|
|
||||||
attributes:
|
|
||||||
label: Homeserver
|
|
||||||
description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: version
|
|
||||||
attributes:
|
|
||||||
label: Synapse Version
|
|
||||||
description: |
|
|
||||||
What version of Synapse is this homeserver running?
|
|
||||||
|
|
||||||
You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version
|
|
||||||
|
|
||||||
or with this command:
|
|
||||||
|
|
||||||
```
|
|
||||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
|
||||||
```
|
|
||||||
|
|
||||||
(You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: install_method
|
|
||||||
attributes:
|
|
||||||
label: Installation Method
|
|
||||||
options:
|
|
||||||
- Docker (matrixdotorg/synapse)
|
|
||||||
- Debian packages from packages.matrix.org
|
|
||||||
- pip (from PyPI)
|
|
||||||
- Other (please mention below)
|
|
||||||
- I don't know
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: database
|
|
||||||
attributes:
|
|
||||||
label: Database
|
|
||||||
description: |
|
|
||||||
Are you using SQLite or PostgreSQL? What's the version of your database?
|
|
||||||
|
|
||||||
If PostgreSQL, please also answer the following:
|
|
||||||
- are you using a single PostgreSQL server
|
|
||||||
or [separate servers for `main` and `state`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#databases)?
|
|
||||||
- have you previously ported from SQLite using the Synapse "portdb" script?
|
|
||||||
- have you previously restored from a backup?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: workers
|
|
||||||
attributes:
|
|
||||||
label: Workers
|
|
||||||
description: |
|
|
||||||
Are you running a single Synapse process, or are you running
|
|
||||||
[2 or more workers](https://matrix-org.github.io/synapse/latest/workers.html)?
|
|
||||||
options:
|
|
||||||
- Single process
|
|
||||||
- Multiple workers
|
|
||||||
- I don't know
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: platform
|
|
||||||
attributes:
|
|
||||||
label: Platform
|
|
||||||
description: |
|
|
||||||
Tell us about the environment in which your homeserver is operating...
|
|
||||||
e.g. distro, hardware, if it's running in a vm/container, etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: config
|
|
||||||
attributes:
|
|
||||||
label: Configuration
|
|
||||||
description: |
|
|
||||||
Do you have any unusual config options turned on? If so, please provide details.
|
|
||||||
|
|
||||||
- Experimental or undocumented features
|
|
||||||
- [Presence](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#presence)
|
|
||||||
- [Message retention](https://matrix-org.github.io/synapse/latest/message_retention_policies.html)
|
|
||||||
- [Synapse modules](https://matrix-org.github.io/synapse/latest/modules/index.html)
|
|
||||||
- type: textarea
|
|
||||||
id: logs
|
|
||||||
attributes:
|
|
||||||
label: Relevant log output
|
|
||||||
description: |
|
|
||||||
Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
|
|
||||||
This will be automatically formatted into code, so there is no need for backticks (`\``).
|
|
||||||
|
|
||||||
Please be careful to remove any personal or private data.
|
|
||||||
|
|
||||||
**Bug reports are usually impossible to diagnose without logging.**
|
|
||||||
render: shell
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: anything_else
|
|
||||||
attributes:
|
|
||||||
label: Anything else that would be useful to know?
|
|
||||||
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
@@ -1,9 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Description:**
|
|
||||||
|
|
||||||
<!-- Describe here the feature you are requesting. -->
|
|
||||||
10
.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
vendored
10
.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
vendored
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
name: Support request
|
|
||||||
about: I need support for Synapse
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Please don't file github issues asking for support.
|
|
||||||
|
|
||||||
Instead, please join [`#synapse:matrix.org`](https://matrix.to/#/#synapse:matrix.org)
|
|
||||||
(from a matrix.org account if necessary), and ask there.
|
|
||||||
14
.github/PULL_REQUEST_TEMPLATE.md
vendored
14
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,14 +0,0 @@
|
|||||||
### Pull Request Checklist
|
|
||||||
|
|
||||||
<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
|
|
||||||
|
|
||||||
* [ ] Pull request is based on the develop branch
|
|
||||||
* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
|
|
||||||
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
|
||||||
- Use markdown where necessary, mostly for `code blocks`.
|
|
||||||
- End with either a period (.) or an exclamation mark (!).
|
|
||||||
- Start with a capital letter.
|
|
||||||
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
|
|
||||||
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
|
|
||||||
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
|
|
||||||
(run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
|
|
||||||
3
.github/SUPPORT.md
vendored
3
.github/SUPPORT.md
vendored
@@ -1,3 +0,0 @@
|
|||||||
[**#synapse:matrix.org**](https://matrix.to/#/#synapse:matrix.org) is the official support room for
|
|
||||||
Synapse, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html.
|
|
||||||
Please ask for support there, rather than filing github issues.
|
|
||||||
23
.github/dependabot.yml
vendored
23
.github/dependabot.yml
vendored
@@ -1,23 +0,0 @@
|
|||||||
version: 2
|
|
||||||
updates:
|
|
||||||
- # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
|
|
||||||
package-ecosystem: "pip"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
|
|
||||||
- package-ecosystem: "docker"
|
|
||||||
directory: "/docker"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
|
|
||||||
- package-ecosystem: "github-actions"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
|
|
||||||
- package-ecosystem: "cargo"
|
|
||||||
directory: "/"
|
|
||||||
versioning-strategy: "lockfile-only"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
49
.github/workflows/dependabot_changelog.yml
vendored
49
.github/workflows/dependabot_changelog.yml
vendored
@@ -1,49 +0,0 @@
|
|||||||
name: Write changelog for dependabot PR
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
- reopened # For debugging!
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
# Needed to be able to push the commit. See
|
|
||||||
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
|
|
||||||
# for a similar example
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
add-changelog:
|
|
||||||
runs-on: 'ubuntu-latest'
|
|
||||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.ref }}
|
|
||||||
- name: Write, commit and push changelog
|
|
||||||
env:
|
|
||||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
|
||||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
|
||||||
run: |
|
|
||||||
echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
|
|
||||||
git add changelog.d
|
|
||||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
|
||||||
git config user.name "GitHub Actions"
|
|
||||||
git commit -m "Changelog"
|
|
||||||
git push
|
|
||||||
shell: bash
|
|
||||||
# The `git push` above does not trigger CI on the dependabot PR.
|
|
||||||
#
|
|
||||||
# By default, workflows can't trigger other workflows when they're just using the
|
|
||||||
# default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
|
|
||||||
# recursive workflow loops by accident, because that'll get very expensive very
|
|
||||||
# quickly.) Instead, you have to manually call out to another workflow, or else
|
|
||||||
# make your changes (i.e. the `git push` above) using a personal access token.
|
|
||||||
# See
|
|
||||||
# https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
|
|
||||||
#
|
|
||||||
# I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
|
|
||||||
# See git commit history for previous attempts. If anyone desperately wants to try
|
|
||||||
# again in the future, make a matrix-bot account and use its access token to git push.
|
|
||||||
|
|
||||||
# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
|
|
||||||
# are sufficiently locked down to dependabot only as above.
|
|
||||||
62
.github/workflows/docker.yml
vendored
62
.github/workflows/docker.yml
vendored
@@ -1,62 +0,0 @@
|
|||||||
# GitHub actions workflow which builds and publishes the docker images.
|
|
||||||
|
|
||||||
name: Build docker images
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags: ["v*"]
|
|
||||||
branches: [ master, main, develop ]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Set up QEMU
|
|
||||||
id: qemu
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
with:
|
|
||||||
platforms: arm64
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
|
|
||||||
- name: Inspect builder
|
|
||||||
run: docker buildx inspect
|
|
||||||
|
|
||||||
- name: Log in to DockerHub
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Calculate docker image tag
|
|
||||||
id: set-tag
|
|
||||||
uses: docker/metadata-action@master
|
|
||||||
with:
|
|
||||||
images: matrixdotorg/synapse
|
|
||||||
flavor: |
|
|
||||||
latest=false
|
|
||||||
tags: |
|
|
||||||
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
|
|
||||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
|
|
||||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
|
|
||||||
type=pep440,pattern={{raw}}
|
|
||||||
|
|
||||||
- name: Build and push all platforms
|
|
||||||
uses: docker/build-push-action@v3
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
labels: "gitsha1=${{ github.sha }}"
|
|
||||||
tags: "${{ steps.set-tag.outputs.tags }}"
|
|
||||||
file: "docker/Dockerfile"
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
|
|
||||||
# arm64 builds OOM without the git fetch setting. c.f.
|
|
||||||
# https://github.com/rust-lang/cargo/issues/10583
|
|
||||||
build-args: |
|
|
||||||
CARGO_NET_GIT_FETCH_WITH_CLI=true
|
|
||||||
34
.github/workflows/docs-pr-netlify.yaml
vendored
34
.github/workflows/docs-pr-netlify.yaml
vendored
@@ -1,34 +0,0 @@
|
|||||||
name: Deploy documentation PR preview
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_run:
|
|
||||||
workflows: [ "Prepare documentation PR preview" ]
|
|
||||||
types:
|
|
||||||
- completed
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
netlify:
|
|
||||||
if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
|
|
||||||
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
|
|
||||||
- name: 📥 Download artifact
|
|
||||||
uses: dawidd6/action-download-artifact@bd10f381a96414ce2b13a11bfa89902ba7cea07f # v2.24.3
|
|
||||||
with:
|
|
||||||
workflow: docs-pr.yaml
|
|
||||||
run_id: ${{ github.event.workflow_run.id }}
|
|
||||||
name: book
|
|
||||||
path: book
|
|
||||||
|
|
||||||
- name: 📤 Deploy to Netlify
|
|
||||||
uses: matrix-org/netlify-pr-preview@v1
|
|
||||||
with:
|
|
||||||
path: book
|
|
||||||
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
|
|
||||||
branch: ${{ github.event.workflow_run.head_branch }}
|
|
||||||
revision: ${{ github.event.workflow_run.head_sha }}
|
|
||||||
token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
|
||||||
site_id: ${{ secrets.NETLIFY_SITE_ID }}
|
|
||||||
desc: Documentation preview
|
|
||||||
deployment_env: PR Documentation Preview
|
|
||||||
60
.github/workflows/docs-pr.yaml
vendored
60
.github/workflows/docs-pr.yaml
vendored
@@ -1,60 +0,0 @@
|
|||||||
name: Prepare documentation PR preview
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- docs/**
|
|
||||||
- book.toml
|
|
||||||
- .github/workflows/docs-pr.yaml
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
pages:
|
|
||||||
name: GitHub Pages
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Setup mdbook
|
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
|
||||||
with:
|
|
||||||
mdbook-version: '0.4.17'
|
|
||||||
|
|
||||||
- name: Build the documentation
|
|
||||||
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
|
||||||
# However, we're using docs/README.md for other purposes and need to pick a new page
|
|
||||||
# as the default. Let's opt for the welcome page instead.
|
|
||||||
run: |
|
|
||||||
mdbook build
|
|
||||||
cp book/welcome_and_overview.html book/index.html
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: book
|
|
||||||
path: book
|
|
||||||
# We'll only use this in a workflow_run, then we're done with it
|
|
||||||
retention-days: 1
|
|
||||||
|
|
||||||
link-check:
|
|
||||||
name: Check links in documentation
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Setup mdbook
|
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
|
||||||
with:
|
|
||||||
mdbook-version: '0.4.17'
|
|
||||||
|
|
||||||
- name: Setup htmltest
|
|
||||||
run: |
|
|
||||||
wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
|
|
||||||
echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
|
|
||||||
tar zxf htmltest_0.17.0_linux_amd64.tar.gz
|
|
||||||
|
|
||||||
- name: Test links with htmltest
|
|
||||||
# Build the book with `./` as the site URL (to make checks on 404.html possible)
|
|
||||||
# Then run htmltest (without checking external links since that involves the network and is slow).
|
|
||||||
run: |
|
|
||||||
MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
|
|
||||||
./htmltest book --skip-external
|
|
||||||
65
.github/workflows/docs.yaml
vendored
65
.github/workflows/docs.yaml
vendored
@@ -1,65 +0,0 @@
|
|||||||
name: Deploy the documentation
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
# For bleeding-edge documentation
|
|
||||||
- develop
|
|
||||||
# For documentation specific to a release
|
|
||||||
- 'release-v*'
|
|
||||||
# stable docs
|
|
||||||
- master
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
pages:
|
|
||||||
name: GitHub Pages
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Setup mdbook
|
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
|
||||||
with:
|
|
||||||
mdbook-version: '0.4.17'
|
|
||||||
|
|
||||||
- name: Build the documentation
|
|
||||||
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
|
||||||
# However, we're using docs/README.md for other purposes and need to pick a new page
|
|
||||||
# as the default. Let's opt for the welcome page instead.
|
|
||||||
run: |
|
|
||||||
mdbook build
|
|
||||||
cp book/welcome_and_overview.html book/index.html
|
|
||||||
|
|
||||||
# Figure out the target directory.
|
|
||||||
#
|
|
||||||
# The target directory depends on the name of the branch
|
|
||||||
#
|
|
||||||
- name: Get the target directory name
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
# first strip the 'refs/heads/' prefix with some shell foo
|
|
||||||
branch="${GITHUB_REF#refs/heads/}"
|
|
||||||
|
|
||||||
case $branch in
|
|
||||||
release-*)
|
|
||||||
# strip 'release-' from the name for release branches.
|
|
||||||
branch="${branch#release-}"
|
|
||||||
;;
|
|
||||||
master)
|
|
||||||
# deploy to "latest" for the master branch.
|
|
||||||
branch="latest"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
# finally, set the 'branch-version' var.
|
|
||||||
echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
# Deploy to the target directory.
|
|
||||||
- name: Deploy to gh pages
|
|
||||||
uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1
|
|
||||||
with:
|
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
publish_dir: ./book
|
|
||||||
destination_dir: ./${{ steps.vars.outputs.branch-version }}
|
|
||||||
216
.github/workflows/latest_deps.yml
vendored
216
.github/workflows/latest_deps.yml
vendored
@@ -1,216 +0,0 @@
|
|||||||
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
|
|
||||||
# dependencies which match the broad requirements. Since most CI runs are against
|
|
||||||
# the locked poetry environment, run specifically against the latest dependencies to
|
|
||||||
# know if there's an upcoming breaking change.
|
|
||||||
#
|
|
||||||
# As an overview this workflow:
|
|
||||||
# - checks out develop,
|
|
||||||
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
|
|
||||||
# - runs mypy and test suites in that checkout.
|
|
||||||
#
|
|
||||||
# Based on the twisted trunk CI job.
|
|
||||||
|
|
||||||
name: Latest dependencies
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: 0 7 * * *
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mypy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
|
||||||
# poetry-core versions), so we install with poetry.
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
poetry-version: "1.2.0"
|
|
||||||
extras: "all"
|
|
||||||
# Dump installed versions for debugging.
|
|
||||||
- run: poetry run pip list > before.txt
|
|
||||||
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
|
|
||||||
# `pip install matrix-synapse[all]` as closely as possible.
|
|
||||||
- run: poetry update --no-dev
|
|
||||||
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
|
||||||
- name: Remove warn_unused_ignores from mypy config
|
|
||||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
|
||||||
- run: poetry run mypy
|
|
||||||
trial:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- database: "sqlite"
|
|
||||||
- database: "postgres"
|
|
||||||
postgres-version: "14"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
|
||||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
|
||||||
if: ${{ matrix.postgres-version }}
|
|
||||||
run: |
|
|
||||||
docker run -d -p 5432:5432 \
|
|
||||||
-e POSTGRES_PASSWORD=postgres \
|
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
|
||||||
postgres:${{ matrix.postgres-version }}
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: pip install .[all,test]
|
|
||||||
- name: Await PostgreSQL
|
|
||||||
if: ${{ matrix.postgres-version }}
|
|
||||||
timeout-minutes: 2
|
|
||||||
run: until pg_isready -h localhost; do sleep 1; done
|
|
||||||
|
|
||||||
# We nuke the local copy, as we've installed synapse into the virtualenv
|
|
||||||
# (rather than use an editable install, which we no longer support). If we
|
|
||||||
# don't do this then python can't find the native lib.
|
|
||||||
- run: rm -rf synapse/
|
|
||||||
|
|
||||||
- run: python -m twisted.trial --jobs=2 tests
|
|
||||||
env:
|
|
||||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
|
||||||
SYNAPSE_POSTGRES_HOST: localhost
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
- name: Dump logs
|
|
||||||
# Logs are most useful when the command fails, always include them.
|
|
||||||
if: ${{ always() }}
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
|
|
||||||
sytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: matrixdotorg/sytest-synapse:testing
|
|
||||||
volumes:
|
|
||||||
- ${{ github.workspace }}:/src
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- sytest-tag: focal
|
|
||||||
|
|
||||||
- sytest-tag: focal
|
|
||||||
postgres: postgres
|
|
||||||
workers: workers
|
|
||||||
redis: redis
|
|
||||||
env:
|
|
||||||
POSTGRES: ${{ matrix.postgres && 1}}
|
|
||||||
WORKERS: ${{ matrix.workers && 1 }}
|
|
||||||
REDIS: ${{ matrix.redis && 1 }}
|
|
||||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Ensure sytest runs `pip install`
|
|
||||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
|
||||||
run: rm /src/poetry.lock
|
|
||||||
working-directory: /src
|
|
||||||
- name: Prepare test blacklist
|
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
|
||||||
- name: Run SyTest
|
|
||||||
run: /bootstrap.sh synapse
|
|
||||||
working-directory: /src
|
|
||||||
- name: Summarise results.tap
|
|
||||||
if: ${{ always() }}
|
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
|
||||||
- name: Upload SyTest logs
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
if: ${{ always() }}
|
|
||||||
with:
|
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
|
||||||
path: |
|
|
||||||
/logs/results.tap
|
|
||||||
/logs/**/*.log*
|
|
||||||
|
|
||||||
|
|
||||||
complement:
|
|
||||||
if: "${{ !failure() && !cancelled() }}"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- arrangement: monolith
|
|
||||||
database: SQLite
|
|
||||||
|
|
||||||
- arrangement: monolith
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
- arrangement: workers
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Run actions/checkout@v3 for synapse
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
path: synapse
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
set -o pipefail
|
|
||||||
TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
|
||||||
shell: bash
|
|
||||||
name: Run Complement Tests
|
|
||||||
|
|
||||||
# Open an issue if the build fails, so we know about it.
|
|
||||||
# Only do this if we're not experimenting with this action in a PR.
|
|
||||||
open-issue:
|
|
||||||
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
|
|
||||||
needs:
|
|
||||||
# TODO: should mypy be included here? It feels more brittle than the others.
|
|
||||||
- mypy
|
|
||||||
- trial
|
|
||||||
- sytest
|
|
||||||
- complement
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
update_existing: true
|
|
||||||
filename: .ci/latest_deps_build_failed_issue_template.md
|
|
||||||
74
.github/workflows/push_complement_image.yml
vendored
74
.github/workflows/push_complement_image.yml
vendored
@@ -1,74 +0,0 @@
|
|||||||
# This task does not run complement tests, see tests.yaml instead.
|
|
||||||
# This task does not build docker images for synapse for use on docker hub, see docker.yaml instead
|
|
||||||
|
|
||||||
name: Store complement-synapse image in ghcr.io
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [ "master" ]
|
|
||||||
schedule:
|
|
||||||
- cron: '0 5 * * *'
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
branch:
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- develop
|
|
||||||
- master
|
|
||||||
|
|
||||||
# Only run this action once per pull request/branch; restart if a new commit arrives.
|
|
||||||
# C.f. https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
|
|
||||||
# and https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
name: Build and push complement image
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
packages: write
|
|
||||||
steps:
|
|
||||||
- name: Checkout specific branch (debug build)
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
if: github.event_name == 'workflow_dispatch'
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.branch }}
|
|
||||||
- name: Checkout clean copy of develop (scheduled build)
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
if: github.event_name == 'schedule'
|
|
||||||
with:
|
|
||||||
ref: develop
|
|
||||||
- name: Checkout clean copy of master (on-push)
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
with:
|
|
||||||
ref: master
|
|
||||||
- name: Login to registry
|
|
||||||
uses: docker/login-action@v1
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: Work out labels for complement image
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
with:
|
|
||||||
images: ghcr.io/${{ github.repository }}/complement-synapse
|
|
||||||
tags: |
|
|
||||||
type=schedule,pattern=nightly,enable=${{ github.event_name == 'schedule'}}
|
|
||||||
type=raw,value=develop,enable=${{ github.event_name == 'schedule' || inputs.branch == 'develop' }}
|
|
||||||
type=raw,value=latest,enable=${{ github.event_name == 'push' || inputs.branch == 'master' }}
|
|
||||||
type=sha,format=long
|
|
||||||
- name: Run scripts-dev/complement.sh to generate complement-synapse:latest image.
|
|
||||||
run: scripts-dev/complement.sh --build-only
|
|
||||||
- name: Tag and push generated image
|
|
||||||
run: |
|
|
||||||
for TAG in ${{ join(fromJson(steps.meta.outputs.json).tags, ' ') }}; do
|
|
||||||
echo "tag and push $TAG"
|
|
||||||
docker tag complement-synapse $TAG
|
|
||||||
docker push $TAG
|
|
||||||
done
|
|
||||||
209
.github/workflows/release-artifacts.yml
vendored
209
.github/workflows/release-artifacts.yml
vendored
@@ -1,209 +0,0 @@
|
|||||||
# GitHub actions workflow which builds the release artifacts.
|
|
||||||
|
|
||||||
name: Build release artifacts
|
|
||||||
|
|
||||||
on:
|
|
||||||
# we build on PRs and develop to (hopefully) get early warning
|
|
||||||
# of things breaking (but only build one set of debs)
|
|
||||||
pull_request:
|
|
||||||
push:
|
|
||||||
branches: ["develop", "release-*"]
|
|
||||||
|
|
||||||
# we do the full build on tags.
|
|
||||||
tags: ["v*"]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
get-distros:
|
|
||||||
name: "Calculate list of debian distros"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.x'
|
|
||||||
- id: set-distros
|
|
||||||
run: |
|
|
||||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
|
||||||
dists='["debian:sid"]'
|
|
||||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
|
||||||
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
|
|
||||||
fi
|
|
||||||
echo "distros=$dists" >> "$GITHUB_OUTPUT"
|
|
||||||
# map the step outputs to job outputs
|
|
||||||
outputs:
|
|
||||||
distros: ${{ steps.set-distros.outputs.distros }}
|
|
||||||
|
|
||||||
# now build the packages with a matrix build.
|
|
||||||
build-debs:
|
|
||||||
needs: get-distros
|
|
||||||
name: "Build .deb packages"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
distro: ${{ fromJson(needs.get-distros.outputs.distros) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
path: src
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
with:
|
|
||||||
install: true
|
|
||||||
|
|
||||||
- name: Set up docker layer caching
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: /tmp/.buildx-cache
|
|
||||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-buildx-
|
|
||||||
|
|
||||||
- name: Set up python
|
|
||||||
uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.x'
|
|
||||||
|
|
||||||
- name: Build the packages
|
|
||||||
# see https://github.com/docker/build-push-action/issues/252
|
|
||||||
# for the cache magic here
|
|
||||||
run: |
|
|
||||||
./src/scripts-dev/build_debian_packages.py \
|
|
||||||
--docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
|
|
||||||
--docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
|
|
||||||
--docker-build-arg=--progress=plain \
|
|
||||||
--docker-build-arg=--load \
|
|
||||||
"${{ matrix.distro }}"
|
|
||||||
rm -rf /tmp/.buildx-cache
|
|
||||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
|
||||||
|
|
||||||
- name: Upload debs as artifacts
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: debs
|
|
||||||
path: debs/*
|
|
||||||
|
|
||||||
build-wheels:
|
|
||||||
name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-20.04, macos-11]
|
|
||||||
arch: [x86_64, aarch64]
|
|
||||||
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
|
||||||
# It is not read by the rest of the workflow.
|
|
||||||
is_pr:
|
|
||||||
- ${{ startsWith(github.ref, 'refs/pull/') }}
|
|
||||||
|
|
||||||
exclude:
|
|
||||||
# Don't build macos wheels on PR CI.
|
|
||||||
- is_pr: true
|
|
||||||
os: "macos-11"
|
|
||||||
# Don't build aarch64 wheels on mac.
|
|
||||||
- os: "macos-11"
|
|
||||||
arch: aarch64
|
|
||||||
# Don't build aarch64 wheels on PR CI.
|
|
||||||
- is_pr: true
|
|
||||||
arch: aarch64
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
|
||||||
# here, because `python` on osx points to Python 2.7.
|
|
||||||
python-version: "3.x"
|
|
||||||
|
|
||||||
- name: Install cibuildwheel
|
|
||||||
run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0
|
|
||||||
|
|
||||||
- name: Set up QEMU to emulate aarch64
|
|
||||||
if: matrix.arch == 'aarch64'
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
with:
|
|
||||||
platforms: arm64
|
|
||||||
|
|
||||||
- name: Build aarch64 wheels
|
|
||||||
if: matrix.arch == 'aarch64'
|
|
||||||
run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Only build a single wheel on PR
|
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
|
||||||
run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Build wheels
|
|
||||||
run: python -m cibuildwheel --output-dir wheelhouse
|
|
||||||
env:
|
|
||||||
# Skip testing for platforms which various libraries don't have wheels
|
|
||||||
# for, and so need extra build deps.
|
|
||||||
CIBW_TEST_SKIP: pp3{7,9}-* *i686* *musl*
|
|
||||||
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
|
|
||||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
|
||||||
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: Wheel
|
|
||||||
path: ./wheelhouse/*.whl
|
|
||||||
|
|
||||||
build-sdist:
|
|
||||||
name: Build sdist
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- run: pip install build
|
|
||||||
|
|
||||||
- name: Build sdist
|
|
||||||
run: python -m build --sdist
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: Sdist
|
|
||||||
path: dist/*.tar.gz
|
|
||||||
|
|
||||||
|
|
||||||
# if it's a tag, create a release and attach the artifacts to it
|
|
||||||
attach-assets:
|
|
||||||
name: "Attach assets to release"
|
|
||||||
if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
|
|
||||||
needs:
|
|
||||||
- build-debs
|
|
||||||
- build-wheels
|
|
||||||
- build-sdist
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Download all workflow run artifacts
|
|
||||||
uses: actions/download-artifact@v3
|
|
||||||
- name: Build a tarball for the debs
|
|
||||||
run: tar -cvJf debs.tar.xz debs
|
|
||||||
- name: Attach to release
|
|
||||||
uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
files: |
|
|
||||||
Sdist/*
|
|
||||||
Wheel/*
|
|
||||||
debs.tar.xz
|
|
||||||
# if it's not already published, keep the release as a draft.
|
|
||||||
draft: true
|
|
||||||
# mark it as a prerelease if the tag contains 'rc'.
|
|
||||||
prerelease: ${{ contains(github.ref, 'rc') }}
|
|
||||||
575
.github/workflows/tests.yml
vendored
575
.github/workflows/tests.yml
vendored
@@ -1,575 +0,0 @@
|
|||||||
name: Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["develop", "release-*"]
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
|
||||||
# don't modify Rust code.
|
|
||||||
changes:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
|
||||||
steps:
|
|
||||||
- uses: dorny/paths-filter@v2
|
|
||||||
id: filter
|
|
||||||
# We only check on PRs
|
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
|
||||||
with:
|
|
||||||
filters: |
|
|
||||||
rust:
|
|
||||||
- 'rust/**'
|
|
||||||
- 'Cargo.toml'
|
|
||||||
- 'Cargo.lock'
|
|
||||||
|
|
||||||
check-sampleconfig:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
extras: "all"
|
|
||||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
|
||||||
- run: poetry run scripts-dev/config-lint.sh
|
|
||||||
|
|
||||||
check-schema-delta:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
|
||||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
|
||||||
|
|
||||||
lint:
|
|
||||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
|
|
||||||
with:
|
|
||||||
typechecking-extras: "all"
|
|
||||||
|
|
||||||
lint-crlf:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Check line endings
|
|
||||||
run: scripts-dev/check_line_terminators.sh
|
|
||||||
|
|
||||||
lint-newsfile:
|
|
||||||
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
fetch-depth: 0
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
|
||||||
- run: scripts-dev/check-newsfragment.sh
|
|
||||||
env:
|
|
||||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
|
||||||
|
|
||||||
lint-pydantic:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
extras: "all"
|
|
||||||
- run: poetry run scripts-dev/check_pydantic_models.py
|
|
||||||
|
|
||||||
lint-clippy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
components: clippy
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo clippy -- -D warnings
|
|
||||||
|
|
||||||
# We also lint against a nightly rustc so that we can lint the benchmark
|
|
||||||
# suite, which requires a nightly compiler.
|
|
||||||
lint-clippy-nightly:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: nightly-2022-12-01
|
|
||||||
components: clippy
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo clippy --all-features -- -D warnings
|
|
||||||
|
|
||||||
lint-rustfmt:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
components: rustfmt
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo fmt --check
|
|
||||||
|
|
||||||
# Dummy step to gate other tests on without repeating the whole list
|
|
||||||
linting-done:
|
|
||||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
|
||||||
needs:
|
|
||||||
- lint
|
|
||||||
- lint-crlf
|
|
||||||
- lint-newsfile
|
|
||||||
- lint-pydantic
|
|
||||||
- check-sampleconfig
|
|
||||||
- check-schema-delta
|
|
||||||
- lint-clippy
|
|
||||||
- lint-rustfmt
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- run: "true"
|
|
||||||
|
|
||||||
calculate-test-jobs:
|
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- id: get-matrix
|
|
||||||
run: .ci/scripts/calculate_jobs.py
|
|
||||||
outputs:
|
|
||||||
trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
|
|
||||||
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
|
||||||
|
|
||||||
trial:
|
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: calculate-test-jobs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
|
||||||
if: ${{ matrix.job.postgres-version }}
|
|
||||||
# 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
|
|
||||||
# 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
|
|
||||||
run: |
|
|
||||||
docker run -d -p 5432:5432 \
|
|
||||||
--tmpfs /var/lib/postgres:rw,size=6144m \
|
|
||||||
--mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
|
|
||||||
-e POSTGRES_PASSWORD=postgres \
|
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
|
||||||
postgres:${{ matrix.job.postgres-version }}
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.job.python-version }}
|
|
||||||
extras: ${{ matrix.job.extras }}
|
|
||||||
- name: Await PostgreSQL
|
|
||||||
if: ${{ matrix.job.postgres-version }}
|
|
||||||
timeout-minutes: 2
|
|
||||||
run: until pg_isready -h localhost; do sleep 1; done
|
|
||||||
- run: poetry run trial --jobs=6 tests
|
|
||||||
env:
|
|
||||||
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
|
|
||||||
SYNAPSE_POSTGRES_HOST: /var/run/postgresql
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
- name: Dump logs
|
|
||||||
# Logs are most useful when the command fails, always include them.
|
|
||||||
if: ${{ always() }}
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
trial-olddeps:
|
|
||||||
# Note: sqlite only; no postgres
|
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
|
||||||
# their build dependencies
|
|
||||||
- run: |
|
|
||||||
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
|
|
||||||
# Calculating the old-deps actually takes a bunch of time, so we cache the
|
|
||||||
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
|
|
||||||
# otherwise the `poetry install` step will error due to the poetry.lock
|
|
||||||
# file being outdated.
|
|
||||||
#
|
|
||||||
# This caches the output of `Prepare old deps`, which should generate the
|
|
||||||
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
|
|
||||||
- uses: actions/cache@v3
|
|
||||||
id: cache-poetry-old-deps
|
|
||||||
name: Cache poetry.lock
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
poetry.lock
|
|
||||||
pyproject.toml
|
|
||||||
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
|
|
||||||
- name: Prepare old deps
|
|
||||||
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
|
||||||
run: .ci/scripts/prepare_old_deps.sh
|
|
||||||
|
|
||||||
# We only now install poetry so that `setup-python-poetry` caches the
|
|
||||||
# right poetry.lock's dependencies.
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
extras: "all test"
|
|
||||||
|
|
||||||
- run: poetry run trial -j6 tests
|
|
||||||
- name: Dump logs
|
|
||||||
# Logs are most useful when the command fails, always include them.
|
|
||||||
if: ${{ always() }}
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
trial-pypy:
|
|
||||||
# Very slow; only run if the branch name includes 'pypy'
|
|
||||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
|
||||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
python-version: ["pypy-3.7"]
|
|
||||||
extras: ["all"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
extras: ${{ matrix.extras }}
|
|
||||||
- run: poetry run trial --jobs=2 tests
|
|
||||||
- name: Dump logs
|
|
||||||
# Logs are most useful when the command fails, always include them.
|
|
||||||
if: ${{ always() }}
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
sytest:
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
needs: calculate-test-jobs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
|
|
||||||
volumes:
|
|
||||||
- ${{ github.workspace }}:/src
|
|
||||||
env:
|
|
||||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
|
||||||
POSTGRES: ${{ matrix.job.postgres && 1}}
|
|
||||||
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
|
|
||||||
WORKERS: ${{ matrix.job.workers && 1 }}
|
|
||||||
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
|
|
||||||
TOP: ${{ github.workspace }}
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Prepare test blacklist
|
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Run SyTest
|
|
||||||
run: /bootstrap.sh synapse
|
|
||||||
working-directory: /src
|
|
||||||
- name: Summarise results.tap
|
|
||||||
if: ${{ always() }}
|
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
|
||||||
- name: Upload SyTest logs
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
if: ${{ always() }}
|
|
||||||
with:
|
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
|
||||||
path: |
|
|
||||||
/logs/results.tap
|
|
||||||
/logs/**/*.log*
|
|
||||||
|
|
||||||
export-data:
|
|
||||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: [linting-done, portdb]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
TOP: ${{ github.workspace }}
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: "postgres"
|
|
||||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
extras: "postgres"
|
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
|
||||||
env:
|
|
||||||
PGHOST: localhost
|
|
||||||
PGUSER: postgres
|
|
||||||
PGPASSWORD: postgres
|
|
||||||
PGDATABASE: postgres
|
|
||||||
|
|
||||||
|
|
||||||
portdb:
|
|
||||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- python-version: "3.7"
|
|
||||||
postgres-version: "11"
|
|
||||||
|
|
||||||
- python-version: "3.11"
|
|
||||||
postgres-version: "15"
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:${{ matrix.postgres-version }}
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: "postgres"
|
|
||||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Add PostgreSQL apt repository
|
|
||||||
# We need a version of pg_dump that can handle the version of
|
|
||||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
|
||||||
# behind new releases, so we have to use the PostreSQL apt repository.
|
|
||||||
# Steps taken from https://www.postgresql.org/download/linux/ubuntu/
|
|
||||||
run: |
|
|
||||||
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
|
|
||||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
|
||||||
sudo apt-get update
|
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
extras: "postgres"
|
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
|
||||||
id: run_tester_script
|
|
||||||
env:
|
|
||||||
PGHOST: localhost
|
|
||||||
PGUSER: postgres
|
|
||||||
PGPASSWORD: postgres
|
|
||||||
PGDATABASE: postgres
|
|
||||||
- name: "Upload schema differences"
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
|
||||||
with:
|
|
||||||
name: Schema dumps
|
|
||||||
path: |
|
|
||||||
unported.sql
|
|
||||||
ported.sql
|
|
||||||
schema_diff
|
|
||||||
|
|
||||||
complement:
|
|
||||||
if: "${{ !failure() && !cancelled() }}"
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- arrangement: monolith
|
|
||||||
database: SQLite
|
|
||||||
|
|
||||||
- arrangement: monolith
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
- arrangement: workers
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Run actions/checkout@v3 for synapse
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
path: synapse
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
set -o pipefail
|
|
||||||
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
|
||||||
shell: bash
|
|
||||||
name: Run Complement Tests
|
|
||||||
|
|
||||||
cargo-test:
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
# There don't seem to be versioned releases of this action per se: for each rust
|
|
||||||
# version there is a branch which gets constantly rebased on top of master.
|
|
||||||
# We pin to a specific commit for paranoia's sake.
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: 1.58.1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo test
|
|
||||||
|
|
||||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
|
||||||
tests-done:
|
|
||||||
if: ${{ always() }}
|
|
||||||
needs:
|
|
||||||
- trial
|
|
||||||
- trial-olddeps
|
|
||||||
- sytest
|
|
||||||
- export-data
|
|
||||||
- portdb
|
|
||||||
- complement
|
|
||||||
- cargo-test
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: matrix-org/done-action@v2
|
|
||||||
with:
|
|
||||||
needs: ${{ toJSON(needs) }}
|
|
||||||
|
|
||||||
# The newsfile lint may be skipped on non PR builds
|
|
||||||
# Cargo test is skipped if there is no changes on Rust code
|
|
||||||
skippable: |
|
|
||||||
lint-newsfile
|
|
||||||
cargo-test
|
|
||||||
15
.github/workflows/triage-incoming.yml
vendored
15
.github/workflows/triage-incoming.yml
vendored
@@ -1,15 +0,0 @@
|
|||||||
name: Move new issues into the issue triage board
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [ opened ]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
triage:
|
|
||||||
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
|
|
||||||
with:
|
|
||||||
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
|
|
||||||
content_id: ${{ github.event.issue.node_id }}
|
|
||||||
secrets:
|
|
||||||
github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
|
||||||
|
|
||||||
44
.github/workflows/triage_labelled.yml
vendored
44
.github/workflows/triage_labelled.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
name: Move labelled issues to correct projects
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [ labeled ]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
move_needs_info:
|
|
||||||
name: Move X-Needs-Info on the triage board
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: >
|
|
||||||
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
|
|
||||||
steps:
|
|
||||||
- uses: actions/add-to-project@main
|
|
||||||
id: add_project
|
|
||||||
with:
|
|
||||||
project-url: "https://github.com/orgs/matrix-org/projects/67"
|
|
||||||
github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
|
||||||
- name: Set status
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
|
||||||
run: |
|
|
||||||
gh api graphql -f query='
|
|
||||||
mutation(
|
|
||||||
$project: ID!
|
|
||||||
$item: ID!
|
|
||||||
$fieldid: ID!
|
|
||||||
$columnid: String!
|
|
||||||
) {
|
|
||||||
updateProjectV2ItemFieldValue(
|
|
||||||
input: {
|
|
||||||
projectId: $project
|
|
||||||
itemId: $item
|
|
||||||
fieldId: $fieldid
|
|
||||||
value: {
|
|
||||||
singleSelectOptionId: $columnid
|
|
||||||
}
|
|
||||||
}
|
|
||||||
) {
|
|
||||||
projectV2Item {
|
|
||||||
id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
|
|
||||||
182
.github/workflows/twisted_trunk.yml
vendored
182
.github/workflows/twisted_trunk.yml
vendored
@@ -1,182 +0,0 @@
|
|||||||
name: Twisted Trunk
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: 0 8 * * *
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mypy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
extras: "all"
|
|
||||||
- run: |
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
- name: Remove warn_unused_ignores from mypy config
|
|
||||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
|
||||||
- run: poetry run mypy
|
|
||||||
|
|
||||||
trial:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
extras: "all test"
|
|
||||||
- run: |
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
- run: poetry run trial --jobs 2 tests
|
|
||||||
|
|
||||||
- name: Dump logs
|
|
||||||
# Logs are most useful when the command fails, always include them.
|
|
||||||
if: ${{ always() }}
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
sytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: matrixdotorg/sytest-synapse:buster
|
|
||||||
volumes:
|
|
||||||
- ${{ github.workspace }}:/src
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Patch dependencies
|
|
||||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
|
||||||
# but the sytest-synapse container expects it to be in /venv/.
|
|
||||||
# We symlink it before running poetry so that poetry actually
|
|
||||||
# ends up installing to `/venv`.
|
|
||||||
run: |
|
|
||||||
ln -s -T /venv /src/.venv
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
working-directory: /src
|
|
||||||
- name: Run SyTest
|
|
||||||
run: /bootstrap.sh synapse
|
|
||||||
working-directory: /src
|
|
||||||
env:
|
|
||||||
# Use offline mode to avoid reinstalling the pinned version of
|
|
||||||
# twisted.
|
|
||||||
OFFLINE: 1
|
|
||||||
- name: Summarise results.tap
|
|
||||||
if: ${{ always() }}
|
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
|
||||||
- name: Upload SyTest logs
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
if: ${{ always() }}
|
|
||||||
with:
|
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
|
||||||
path: |
|
|
||||||
/logs/results.tap
|
|
||||||
/logs/**/*.log*
|
|
||||||
|
|
||||||
complement:
|
|
||||||
if: "${{ !failure() && !cancelled() }}"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- arrangement: monolith
|
|
||||||
database: SQLite
|
|
||||||
|
|
||||||
- arrangement: monolith
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
- arrangement: workers
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Run actions/checkout@v3 for synapse
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
path: synapse
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
|
||||||
|
|
||||||
# This step is specific to the 'Twisted trunk' test run:
|
|
||||||
- name: Patch dependencies
|
|
||||||
run: |
|
|
||||||
set -x
|
|
||||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
|
||||||
pipx install poetry==1.2.0
|
|
||||||
|
|
||||||
poetry remove -n twisted
|
|
||||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry lock --no-update
|
|
||||||
working-directory: synapse
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
set -o pipefail
|
|
||||||
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
|
||||||
shell: bash
|
|
||||||
name: Run Complement Tests
|
|
||||||
|
|
||||||
# open an issue if the build fails, so we know about it.
|
|
||||||
open-issue:
|
|
||||||
if: failure()
|
|
||||||
needs:
|
|
||||||
- mypy
|
|
||||||
- trial
|
|
||||||
- sytest
|
|
||||||
- complement
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
update_existing: true
|
|
||||||
filename: .ci/twisted_trunk_build_failed_issue_template.md
|
|
||||||
93
.gitignore
vendored
93
.gitignore
vendored
@@ -1,71 +1,44 @@
|
|||||||
# filename patterns
|
*.pyc
|
||||||
*~
|
|
||||||
.*.swp
|
.*.swp
|
||||||
.#*
|
|
||||||
*.deb
|
|
||||||
*.egg
|
|
||||||
*.egg-info
|
|
||||||
*.lock
|
|
||||||
*.py[cod]
|
|
||||||
*.snap
|
|
||||||
*.tac
|
|
||||||
_trial_temp/
|
|
||||||
_trial_temp*/
|
|
||||||
/out
|
|
||||||
.DS_Store
|
.DS_Store
|
||||||
__pycache__/
|
_trial_temp/
|
||||||
|
logs/
|
||||||
|
dbs/
|
||||||
|
*.egg
|
||||||
|
dist/
|
||||||
|
docs/build/
|
||||||
|
*.egg-info
|
||||||
|
|
||||||
# We do want the poetry and cargo lockfile.
|
cmdclient_config.json
|
||||||
!poetry.lock
|
homeserver*.db
|
||||||
!Cargo.lock
|
homeserver*.log
|
||||||
|
homeserver*.pid
|
||||||
|
homeserver*.yaml
|
||||||
|
|
||||||
# stuff that is likely to exist when you run a server locally
|
*.signing.key
|
||||||
/*.db
|
*.tls.crt
|
||||||
/*.log
|
*.tls.dh
|
||||||
/*.log.*
|
*.tls.key
|
||||||
/*.log.config
|
|
||||||
/*.pid
|
|
||||||
/.python-version
|
|
||||||
/*.signing.key
|
|
||||||
/env/
|
|
||||||
/.venv*/
|
|
||||||
/homeserver*.yaml
|
|
||||||
/logs
|
|
||||||
/media_store/
|
|
||||||
/uploads
|
|
||||||
|
|
||||||
# For direnv users
|
.coverage
|
||||||
/.envrc
|
htmlcov
|
||||||
.direnv/
|
|
||||||
|
|
||||||
# IDEs
|
demo/*.db
|
||||||
/.idea/
|
demo/*.log
|
||||||
/.ropeproject/
|
demo/*.log.*
|
||||||
/.vscode/
|
demo/*.pid
|
||||||
|
demo/media_store.*
|
||||||
|
demo/etc
|
||||||
|
|
||||||
# build products
|
uploads
|
||||||
!/.coveragerc
|
|
||||||
/.coverage*
|
|
||||||
/.mypy_cache/
|
|
||||||
/.tox
|
|
||||||
/.tox-pg-container
|
|
||||||
/build/
|
|
||||||
/coverage.*
|
|
||||||
/dist/
|
|
||||||
/docs/build/
|
|
||||||
/htmlcov
|
|
||||||
/pip-wheel-metadata/
|
|
||||||
|
|
||||||
# docs
|
.idea/
|
||||||
book/
|
media_store/
|
||||||
|
|
||||||
# complement
|
*.tac
|
||||||
/complement-*
|
|
||||||
/master.tar.gz
|
|
||||||
|
|
||||||
# rust
|
build/
|
||||||
/target/
|
|
||||||
/synapse/*.so
|
|
||||||
|
|
||||||
# Poetry will create a setup.py, which we don't want to include.
|
localhost-800*/
|
||||||
/setup.py
|
static/client/register/register_config.js
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
group_imports = "StdExternalCrate"
|
|
||||||
51
AUTHORS.rst
51
AUTHORS.rst
@@ -1,51 +0,0 @@
|
|||||||
The following is an incomplete list of people outside the core team who have
|
|
||||||
contributed to Synapse. It is no longer maintained: more recent contributions
|
|
||||||
are listed in the `changelog <CHANGES.md>`_.
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Turned to Dust <dwinslow86 at gmail.com>
|
|
||||||
* ArchLinux installation instructions
|
|
||||||
|
|
||||||
Brabo <brabo at riseup.net>
|
|
||||||
* Installation instruction fixes
|
|
||||||
|
|
||||||
Ivan Shapovalov <intelfx100 at gmail.com>
|
|
||||||
* contrib/systemd: a sample systemd unit file and a logger configuration
|
|
||||||
|
|
||||||
Eric Myhre <hash at exultant.us>
|
|
||||||
* Fix bug where ``media_store_path`` config option was ignored by v0 content
|
|
||||||
repository API.
|
|
||||||
|
|
||||||
Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
|
|
||||||
* Add SAML2 support for registration and login.
|
|
||||||
|
|
||||||
Steven Hammerton <steven.hammerton at openmarket.com>
|
|
||||||
* Add CAS support for registration and login.
|
|
||||||
|
|
||||||
Mads Robin Christensen <mads at v42 dot dk>
|
|
||||||
* CentOS 7 installation instructions.
|
|
||||||
|
|
||||||
Florent Violleau <floviolleau at gmail dot com>
|
|
||||||
* Add Raspberry Pi installation instructions and general troubleshooting items
|
|
||||||
|
|
||||||
Niklas Riekenbrauck <nikriek at gmail dot.com>
|
|
||||||
* Add JWT support for registration and login
|
|
||||||
|
|
||||||
Christoph Witzany <christoph at web.crofting.com>
|
|
||||||
* Add LDAP support for authentication
|
|
||||||
|
|
||||||
Pierre Jaury <pierre at jaury.eu>
|
|
||||||
* Docker packaging
|
|
||||||
|
|
||||||
Serban Constantin <serban.constantin at gmail dot com>
|
|
||||||
* Small bug fix
|
|
||||||
|
|
||||||
Joseph Weston <joseph at weston.cloud>
|
|
||||||
* Add admin API for querying HS version
|
|
||||||
|
|
||||||
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
|
||||||
* Documentation improvements
|
|
||||||
|
|
||||||
Werner Sembach <werner.sembach at fau dot de>
|
|
||||||
* Automatically remove a group/community when it is empty
|
|
||||||
2875
CHANGES.md
2875
CHANGES.md
File diff suppressed because it is too large
Load Diff
449
CHANGES.rst
Normal file
449
CHANGES.rst
Normal file
@@ -0,0 +1,449 @@
|
|||||||
|
Changes in synapse v0.8.0 (2015-03-06)
|
||||||
|
======================================
|
||||||
|
|
||||||
|
General:
|
||||||
|
|
||||||
|
* Add support for registration fallback. This is a page hosted on the server
|
||||||
|
which allows a user to register for an account, regardless of what client
|
||||||
|
they are using (e.g. mobile devices).
|
||||||
|
|
||||||
|
* Added new default push rules and made them configurable by clients:
|
||||||
|
|
||||||
|
* Suppress all notice messages.
|
||||||
|
* Notify when invited to a new room.
|
||||||
|
* Notify for messages that don't match any rule.
|
||||||
|
* Notify on incoming call.
|
||||||
|
|
||||||
|
Federation:
|
||||||
|
|
||||||
|
* Added per host server side rate-limiting of incoming federation requests.
|
||||||
|
* Added a ``/get_missing_events/`` API to federation to reduce number of
|
||||||
|
``/events/`` requests.
|
||||||
|
|
||||||
|
Configuration:
|
||||||
|
|
||||||
|
* Added configuration option to disable registration:
|
||||||
|
``disable_registration``.
|
||||||
|
* Added configuration option to change soft limit of number of open file
|
||||||
|
descriptors: ``soft_file_limit``.
|
||||||
|
* Make ``tls_private_key_path`` optional when running with ``no_tls``.
|
||||||
|
|
||||||
|
Application services:
|
||||||
|
|
||||||
|
* Application services can now poll on the CS API ``/events`` for their events,
|
||||||
|
by providing their application service ``access_token``.
|
||||||
|
* Added exclusive namespace support to application services API.
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.7.1 (2015-02-19)
|
||||||
|
======================================
|
||||||
|
|
||||||
|
* Initial alpha implementation of parts of the Application Services API.
|
||||||
|
Including:
|
||||||
|
|
||||||
|
- AS Registration / Unregistration
|
||||||
|
- User Query API
|
||||||
|
- Room Alias Query API
|
||||||
|
- Push transport for receiving events.
|
||||||
|
- User/Alias namespace admin control
|
||||||
|
|
||||||
|
* Add cache when fetching events from remote servers to stop repeatedly
|
||||||
|
fetching events with bad signatures.
|
||||||
|
* Respect the per remote server retry scheme when fetching both events and
|
||||||
|
server keys to reduce the number of times we send requests to dead servers.
|
||||||
|
* Inform remote servers when the local server fails to handle a received event.
|
||||||
|
* Turn off python bytecode generation due to problems experienced when
|
||||||
|
upgrading from previous versions.
|
||||||
|
|
||||||
|
Changes in synapse v0.7.0 (2015-02-12)
|
||||||
|
======================================
|
||||||
|
|
||||||
|
* Add initial implementation of the query auth federation API, allowing
|
||||||
|
servers to agree on whether an event should be allowed or rejected.
|
||||||
|
* Persist events we have rejected from federation, fixing the bug where
|
||||||
|
servers would keep requesting the same events.
|
||||||
|
* Various federation performance improvements, including:
|
||||||
|
|
||||||
|
- Add in memory caches on queries such as:
|
||||||
|
|
||||||
|
* Computing the state of a room at a point in time, used for
|
||||||
|
authorization on federation requests.
|
||||||
|
* Fetching events from the database.
|
||||||
|
* User's room membership, used for authorizing presence updates.
|
||||||
|
|
||||||
|
- Upgraded JSON library to improve parsing and serialisation speeds.
|
||||||
|
|
||||||
|
* Add default avatars to new user accounts using pydenticon library.
|
||||||
|
* Correctly time out federation requests.
|
||||||
|
* Retry federation requests against different servers.
|
||||||
|
* Add support for push and push rules.
|
||||||
|
* Add alpha versions of proposed new CSv2 APIs, including ``/sync`` API.
|
||||||
|
|
||||||
|
Changes in synapse 0.6.1 (2015-01-07)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
* Major optimizations to improve performance of initial sync and event sending
|
||||||
|
in large rooms (by up to 10x)
|
||||||
|
* Media repository now includes a Content-Length header on media downloads.
|
||||||
|
* Improve quality of thumbnails by changing resizing algorithm.
|
||||||
|
|
||||||
|
Changes in synapse 0.6.0 (2014-12-16)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
* Add new API for media upload and download that supports thumbnailing.
|
||||||
|
* Replicate media uploads over multiple homeservers so media is always served
|
||||||
|
to clients from their local homeserver. This obsoletes the
|
||||||
|
--content-addr parameter and confusion over accessing content directly
|
||||||
|
from remote homeservers.
|
||||||
|
* Implement exponential backoff when retrying federation requests when
|
||||||
|
sending to remote homeservers which are offline.
|
||||||
|
* Implement typing notifications.
|
||||||
|
* Fix bugs where we sent events with invalid signatures due to bugs where
|
||||||
|
we incorrectly persisted events.
|
||||||
|
* Improve performance of database queries involving retrieving events.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.4a (2014-12-13)
|
||||||
|
======================================
|
||||||
|
|
||||||
|
* Fix bug while generating the error message when a file path specified in
|
||||||
|
the config doesn't exist.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.4 (2014-12-03)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
* Fix presence bug where some rooms did not display presence updates for
|
||||||
|
remote users.
|
||||||
|
* Do not log SQL timing log lines when started with "-v"
|
||||||
|
* Fix potential memory leak.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.3c (2014-12-02)
|
||||||
|
======================================
|
||||||
|
|
||||||
|
* Change the default value for the `content_addr` option to use the HTTP
|
||||||
|
listener, as by default the HTTPS listener will be using a self-signed
|
||||||
|
certificate.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.3 (2014-11-27)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
* Fix bug that caused joining a remote room to fail if a single event was not
|
||||||
|
signed correctly.
|
||||||
|
* Fix bug which caused servers to continuously try and fetch events from other
|
||||||
|
servers.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.2 (2014-11-26)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Fix major bug that caused rooms to disappear from peoples initial sync.
|
||||||
|
|
||||||
|
Changes in synapse 0.5.1 (2014-11-26)
|
||||||
|
=====================================
|
||||||
|
See UPGRADES.rst for specific instructions on how to upgrade.
|
||||||
|
|
||||||
|
* Fix bug where we served up an Event that did not match its signatures.
|
||||||
|
* Fix regression where we no longer correctly handled the case where a
|
||||||
|
homeserver receives an event for a room it doesn't recognise (but is in.)
|
||||||
|
|
||||||
|
Changes in synapse 0.5.0 (2014-11-19)
|
||||||
|
=====================================
|
||||||
|
This release includes changes to the federation protocol and client-server API
|
||||||
|
that is not backwards compatible.
|
||||||
|
|
||||||
|
This release also changes the internal database schemas and so requires servers to
|
||||||
|
drop their current history. See UPGRADES.rst for details.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Add authentication and authorization to the federation protocol. Events are
|
||||||
|
now signed by their originating homeservers.
|
||||||
|
* Implement the new authorization model for rooms.
|
||||||
|
* Split out web client into a seperate repository: matrix-angular-sdk.
|
||||||
|
* Change the structure of PDUs.
|
||||||
|
* Fix bug where user could not join rooms via an alias containing 4-byte
|
||||||
|
UTF-8 characters.
|
||||||
|
* Merge concept of PDUs and Events internally.
|
||||||
|
* Improve logging by adding request ids to log lines.
|
||||||
|
* Implement a very basic room initial sync API.
|
||||||
|
* Implement the new invite/join federation APIs.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* The webclient has been moved to a seperate repository.
|
||||||
|
|
||||||
|
Changes in synapse 0.4.2 (2014-10-31)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Fix bugs where we did not notify users of correct presence updates.
|
||||||
|
* Fix bug where we did not handle sub second event stream timeouts.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add ability to click on messages to see JSON.
|
||||||
|
* Add ability to redact messages.
|
||||||
|
* Add ability to view and edit all room state JSON.
|
||||||
|
* Handle incoming redactions.
|
||||||
|
* Improve feedback on errors.
|
||||||
|
* Fix bugs in mobile CSS.
|
||||||
|
* Fix bugs with desktop notifications.
|
||||||
|
|
||||||
|
Changes in synapse 0.4.1 (2014-10-17)
|
||||||
|
=====================================
|
||||||
|
Webclient:
|
||||||
|
* Fix bug with display of timestamps.
|
||||||
|
|
||||||
|
Changes in synpase 0.4.0 (2014-10-17)
|
||||||
|
=====================================
|
||||||
|
This release includes changes to the federation protocol and client-server API
|
||||||
|
that is not backwards compatible.
|
||||||
|
|
||||||
|
The Matrix specification has been moved to a separate git repository:
|
||||||
|
http://github.com/matrix-org/matrix-doc
|
||||||
|
|
||||||
|
You will also need an updated syutil and config. See UPGRADES.rst.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Sign federation transactions to assert strong identity over federation.
|
||||||
|
* Rename timestamp keys in PDUs and events from 'ts' and 'hsob_ts' to 'origin_server_ts'.
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse 0.3.4 (2014-09-25)
|
||||||
|
=====================================
|
||||||
|
This version adds support for using a TURN server. See docs/turn-howto.rst on
|
||||||
|
how to set one up.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Add support for redaction of messages.
|
||||||
|
* Fix bug where inviting a user on a remote home server could take up to
|
||||||
|
20-30s.
|
||||||
|
* Implement a get current room state API.
|
||||||
|
* Add support specifying and retrieving turn server configuration.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add button to send messages to users from the home page.
|
||||||
|
* Add support for using TURN for VoIP calls.
|
||||||
|
* Show display name change messages.
|
||||||
|
* Fix bug where the client didn't get the state of a newly joined room
|
||||||
|
until after it has been refreshed.
|
||||||
|
* Fix bugs with tab complete.
|
||||||
|
* Fix bug where holding down the down arrow caused chrome to chew 100% CPU.
|
||||||
|
* Fix bug where desktop notifications occasionally used "Undefined" as the
|
||||||
|
display name.
|
||||||
|
* Fix more places where we sometimes saw room IDs incorrectly.
|
||||||
|
* Fix bug which caused lag when entering text in the text box.
|
||||||
|
|
||||||
|
Changes in synapse 0.3.3 (2014-09-22)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Fix bug where you continued to get events for rooms you had left.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add support for video calls with basic UI.
|
||||||
|
* Fix bug where one to one chats were named after your display name rather
|
||||||
|
than the other person's.
|
||||||
|
* Fix bug which caused lag when typing in the textarea.
|
||||||
|
* Refuse to run on browsers we know won't work.
|
||||||
|
* Trigger pagination when joining new rooms.
|
||||||
|
* Fix bug where we sometimes didn't display invitations in recents.
|
||||||
|
* Automatically join room when accepting a VoIP call.
|
||||||
|
* Disable outgoing and reject incoming calls on browsers we don't support
|
||||||
|
VoIP in.
|
||||||
|
* Don't display desktop notifications for messages in the room you are
|
||||||
|
non-idle and speaking in.
|
||||||
|
|
||||||
|
Changes in synapse 0.3.2 (2014-09-18)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Fix bug where an empty "bing words" list in old accounts didn't send
|
||||||
|
notifications when it should have done.
|
||||||
|
|
||||||
|
Changes in synapse 0.3.1 (2014-09-18)
|
||||||
|
=====================================
|
||||||
|
This is a release to hotfix v0.3.0 to fix two regressions.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Fix a regression where we sometimes displayed duplicate events.
|
||||||
|
* Fix a regression where we didn't immediately remove rooms you were
|
||||||
|
banned in from the recents list.
|
||||||
|
|
||||||
|
Changes in synapse 0.3.0 (2014-09-18)
|
||||||
|
=====================================
|
||||||
|
See UPGRADE for information about changes to the client server API, including
|
||||||
|
breaking backwards compatibility with VoIP calls and registration API.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* When a user changes their displayname or avatar the server will now update
|
||||||
|
all their join states to reflect this.
|
||||||
|
* The server now adds "age" key to events to indicate how old they are. This
|
||||||
|
is clock independent, so at no point does any server or webclient have to
|
||||||
|
assume their clock is in sync with everyone else.
|
||||||
|
* Fix bug where we didn't correctly pull in missing PDUs.
|
||||||
|
* Fix bug where prev_content key wasn't always returned.
|
||||||
|
* Add support for password resets.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Improve page content loading.
|
||||||
|
* Join/parts now trigger desktop notifications.
|
||||||
|
* Always show room aliases in the UI if one is present.
|
||||||
|
* No longer show user-count in the recents side panel.
|
||||||
|
* Add up & down arrow support to the text box for message sending to step
|
||||||
|
through your sent history.
|
||||||
|
* Don't display notifications for our own messages.
|
||||||
|
* Emotes are now formatted correctly in desktop notifications.
|
||||||
|
* The recents list now differentiates between public & private rooms.
|
||||||
|
* Fix bug where when switching between rooms the pagination flickered before
|
||||||
|
the view jumped to the bottom of the screen.
|
||||||
|
* Add bing word support.
|
||||||
|
|
||||||
|
Registration API:
|
||||||
|
* The registration API has been overhauled to function like the login API. In
|
||||||
|
practice, this means registration requests must now include the following:
|
||||||
|
'type':'m.login.password'. See UPGRADE for more information on this.
|
||||||
|
* The 'user_id' key has been renamed to 'user' to better match the login API.
|
||||||
|
* There is an additional login type: 'm.login.email.identity'.
|
||||||
|
* The command client and web client have been updated to reflect these changes.
|
||||||
|
|
||||||
|
Changes in synapse 0.2.3 (2014-09-12)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Fix bug where we stopped sending events to remote home servers if a
|
||||||
|
user from that home server left, even if there were some still in the
|
||||||
|
room.
|
||||||
|
* Fix bugs in the state conflict resolution where it was incorrectly
|
||||||
|
rejecting events.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Display room names and topics.
|
||||||
|
* Allow setting/editing of room names and topics.
|
||||||
|
* Display information about rooms on the main page.
|
||||||
|
* Handle ban and kick events in real time.
|
||||||
|
* VoIP UI and reliability improvements.
|
||||||
|
* Add glare support for VoIP.
|
||||||
|
* Improvements to initial startup speed.
|
||||||
|
* Don't display duplicate join events.
|
||||||
|
* Local echo of messages.
|
||||||
|
* Differentiate sending and sent of local echo.
|
||||||
|
* Various minor bug fixes.
|
||||||
|
|
||||||
|
Changes in synapse 0.2.2 (2014-09-06)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* When the server returns state events it now also includes the previous
|
||||||
|
content.
|
||||||
|
* Add support for inviting people when creating a new room.
|
||||||
|
* Make the homeserver inform the room via `m.room.aliases` when a new alias
|
||||||
|
is added for a room.
|
||||||
|
* Validate `m.room.power_level` events.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add support for captchas on registration.
|
||||||
|
* Handle `m.room.aliases` events.
|
||||||
|
* Asynchronously send messages and show a local echo.
|
||||||
|
* Inform the UI when a message failed to send.
|
||||||
|
* Only autoscroll on receiving a new message if the user was already at the
|
||||||
|
bottom of the screen.
|
||||||
|
* Add support for ban/kick reasons.
|
||||||
|
|
||||||
|
Changes in synapse 0.2.1 (2014-09-03)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Added support for signing up with a third party id.
|
||||||
|
* Add synctl scripts.
|
||||||
|
* Added rate limiting.
|
||||||
|
* Add option to change the external address the content repo uses.
|
||||||
|
* Presence bug fixes.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Added support for signing up with a third party id.
|
||||||
|
* Added support for banning and kicking users.
|
||||||
|
* Added support for displaying and setting ops.
|
||||||
|
* Added support for room names.
|
||||||
|
* Fix bugs with room membership event display.
|
||||||
|
|
||||||
|
Changes in synapse 0.2.0 (2014-09-02)
|
||||||
|
=====================================
|
||||||
|
This update changes many configuration options, updates the
|
||||||
|
database schema and mandates SSL for server-server connections.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Require SSL for server-server connections.
|
||||||
|
* Add SSL listener for client-server connections.
|
||||||
|
* Add ability to use config files.
|
||||||
|
* Add support for kicking/banning and power levels.
|
||||||
|
* Allow setting of room names and topics on creation.
|
||||||
|
* Change presence to include last seen time of the user.
|
||||||
|
* Change url path prefix to /_matrix/...
|
||||||
|
* Bug fixes to presence.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Reskin the CSS for registration and login.
|
||||||
|
* Various improvements to rooms CSS.
|
||||||
|
* Support changes in client-server API.
|
||||||
|
* Bug fixes to VOIP UI.
|
||||||
|
* Various bug fixes to handling of changes to room member list.
|
||||||
|
|
||||||
|
Changes in synapse 0.1.2 (2014-08-29)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add basic call state UI for VoIP calls.
|
||||||
|
|
||||||
|
Changes in synapse 0.1.1 (2014-08-29)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Fix bug that caused the event stream to not notify some clients about
|
||||||
|
changes.
|
||||||
|
|
||||||
|
Changes in synapse 0.1.0 (2014-08-29)
|
||||||
|
=====================================
|
||||||
|
Presence has been reenabled in this release.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Update client to server API, including:
|
||||||
|
- Use a more consistent url scheme.
|
||||||
|
- Provide more useful information in the initial sync api.
|
||||||
|
* Change the presence handling to be much more efficient.
|
||||||
|
* Change the presence server to server API to not require explicit polling of
|
||||||
|
all users who share a room with a user.
|
||||||
|
* Fix races in the event streaming logic.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Update to use new client to server API.
|
||||||
|
* Add basic VOIP support.
|
||||||
|
* Add idle timers that change your status to away.
|
||||||
|
* Add recent rooms column when viewing a room.
|
||||||
|
* Various network efficiency improvements.
|
||||||
|
* Add basic mobile browser support.
|
||||||
|
* Add a settings page.
|
||||||
|
|
||||||
|
Changes in synapse 0.0.1 (2014-08-22)
|
||||||
|
=====================================
|
||||||
|
Presence has been disabled in this release due to a bug that caused the
|
||||||
|
homeserver to spam other remote homeservers.
|
||||||
|
|
||||||
|
Homeserver:
|
||||||
|
* Completely change the database schema to support generic event types.
|
||||||
|
* Improve presence reliability.
|
||||||
|
* Improve reliability of joining remote rooms.
|
||||||
|
* Fix bug where room join events were duplicated.
|
||||||
|
* Improve initial sync API to return more information to the client.
|
||||||
|
* Stop generating fake messages for room membership events.
|
||||||
|
|
||||||
|
Webclient:
|
||||||
|
* Add tab completion of names.
|
||||||
|
* Add ability to upload and send images.
|
||||||
|
* Add profile pages.
|
||||||
|
* Improve CSS layout of room.
|
||||||
|
* Disambiguate identical display names.
|
||||||
|
* Don't get remote users display names and avatars individually.
|
||||||
|
* Use the new initial sync API to reduce number of round trips to the homeserver.
|
||||||
|
* Change url scheme to use room aliases instead of room ids where known.
|
||||||
|
* Increase longpoll timeout.
|
||||||
|
|
||||||
|
Changes in synapse 0.0.0 (2014-08-13)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
* Initial alpha release
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
# Welcome to Synapse
|
|
||||||
|
|
||||||
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
|
||||||
466
Cargo.lock
generated
466
Cargo.lock
generated
@@ -1,466 +0,0 @@
|
|||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
version = 3
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "aho-corasick"
|
|
||||||
version = "0.7.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
|
|
||||||
dependencies = [
|
|
||||||
"memchr",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "anyhow"
|
|
||||||
version = "1.0.68"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "arc-swap"
|
|
||||||
version = "1.5.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "autocfg"
|
|
||||||
version = "1.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bitflags"
|
|
||||||
version = "1.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "blake2"
|
|
||||||
version = "0.10.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
|
|
||||||
dependencies = [
|
|
||||||
"digest",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "block-buffer"
|
|
||||||
version = "0.10.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
|
|
||||||
dependencies = [
|
|
||||||
"generic-array",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfg-if"
|
|
||||||
version = "1.0.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "crypto-common"
|
|
||||||
version = "0.1.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
|
|
||||||
dependencies = [
|
|
||||||
"generic-array",
|
|
||||||
"typenum",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "digest"
|
|
||||||
version = "0.10.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
|
|
||||||
dependencies = [
|
|
||||||
"block-buffer",
|
|
||||||
"crypto-common",
|
|
||||||
"subtle",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "generic-array"
|
|
||||||
version = "0.14.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
|
|
||||||
dependencies = [
|
|
||||||
"typenum",
|
|
||||||
"version_check",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "hex"
|
|
||||||
version = "0.4.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "indoc"
|
|
||||||
version = "1.0.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "itoa"
|
|
||||||
version = "1.0.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lazy_static"
|
|
||||||
version = "1.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "libc"
|
|
||||||
version = "0.2.135"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lock_api"
|
|
||||||
version = "0.4.9"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg",
|
|
||||||
"scopeguard",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "log"
|
|
||||||
version = "0.4.17"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "memchr"
|
|
||||||
version = "2.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "memoffset"
|
|
||||||
version = "0.6.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "once_cell"
|
|
||||||
version = "1.15.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "parking_lot"
|
|
||||||
version = "0.12.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
|
|
||||||
dependencies = [
|
|
||||||
"lock_api",
|
|
||||||
"parking_lot_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "parking_lot_core"
|
|
||||||
version = "0.9.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"libc",
|
|
||||||
"redox_syscall",
|
|
||||||
"smallvec",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro2"
|
|
||||||
version = "1.0.46"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
|
|
||||||
dependencies = [
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3"
|
|
||||||
version = "0.17.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
|
|
||||||
dependencies = [
|
|
||||||
"anyhow",
|
|
||||||
"cfg-if",
|
|
||||||
"indoc",
|
|
||||||
"libc",
|
|
||||||
"memoffset",
|
|
||||||
"parking_lot",
|
|
||||||
"pyo3-build-config",
|
|
||||||
"pyo3-ffi",
|
|
||||||
"pyo3-macros",
|
|
||||||
"unindent",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3-build-config"
|
|
||||||
version = "0.17.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
|
|
||||||
dependencies = [
|
|
||||||
"once_cell",
|
|
||||||
"target-lexicon",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3-ffi"
|
|
||||||
version = "0.17.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
"pyo3-build-config",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3-log"
|
|
||||||
version = "0.7.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
|
|
||||||
dependencies = [
|
|
||||||
"arc-swap",
|
|
||||||
"log",
|
|
||||||
"pyo3",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3-macros"
|
|
||||||
version = "0.17.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"pyo3-macros-backend",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyo3-macros-backend"
|
|
||||||
version = "0.17.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pythonize"
|
|
||||||
version = "0.17.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
|
|
||||||
dependencies = [
|
|
||||||
"pyo3",
|
|
||||||
"serde",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quote"
|
|
||||||
version = "1.0.21"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "redox_syscall"
|
|
||||||
version = "0.2.16"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex"
|
|
||||||
version = "1.7.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
|
|
||||||
dependencies = [
|
|
||||||
"aho-corasick",
|
|
||||||
"memchr",
|
|
||||||
"regex-syntax",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex-syntax"
|
|
||||||
version = "0.6.27"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ryu"
|
|
||||||
version = "1.0.11"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "scopeguard"
|
|
||||||
version = "1.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde"
|
|
||||||
version = "1.0.152"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
|
|
||||||
dependencies = [
|
|
||||||
"serde_derive",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde_derive"
|
|
||||||
version = "1.0.152"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde_json"
|
|
||||||
version = "1.0.91"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
|
|
||||||
dependencies = [
|
|
||||||
"itoa",
|
|
||||||
"ryu",
|
|
||||||
"serde",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "smallvec"
|
|
||||||
version = "1.10.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "subtle"
|
|
||||||
version = "2.4.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "syn"
|
|
||||||
version = "1.0.104"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "synapse"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"anyhow",
|
|
||||||
"blake2",
|
|
||||||
"hex",
|
|
||||||
"lazy_static",
|
|
||||||
"log",
|
|
||||||
"pyo3",
|
|
||||||
"pyo3-log",
|
|
||||||
"pythonize",
|
|
||||||
"regex",
|
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "target-lexicon"
|
|
||||||
version = "0.12.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typenum"
|
|
||||||
version = "1.15.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-ident"
|
|
||||||
version = "1.0.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unindent"
|
|
||||||
version = "0.1.10"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "version_check"
|
|
||||||
version = "0.9.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-sys"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
|
|
||||||
dependencies = [
|
|
||||||
"windows_aarch64_msvc",
|
|
||||||
"windows_i686_gnu",
|
|
||||||
"windows_i686_msvc",
|
|
||||||
"windows_x86_64_gnu",
|
|
||||||
"windows_x86_64_msvc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows_aarch64_msvc"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows_i686_gnu"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows_i686_msvc"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows_x86_64_gnu"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows_x86_64_msvc"
|
|
||||||
version = "0.36.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
# We make the whole Synapse folder a workspace so that we can run `cargo`
|
|
||||||
# commands from the root (rather than having to cd into rust/).
|
|
||||||
|
|
||||||
[workspace]
|
|
||||||
members = ["rust"]
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
# Installation Instructions
|
|
||||||
|
|
||||||
This document has moved to the
|
|
||||||
[Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
|
||||||
Please update your links.
|
|
||||||
|
|
||||||
The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
|
|
||||||
14
MANIFEST.in
Normal file
14
MANIFEST.in
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
include synctl
|
||||||
|
include LICENSE
|
||||||
|
include VERSION
|
||||||
|
include *.rst
|
||||||
|
include demo/README
|
||||||
|
|
||||||
|
recursive-include synapse/storage/schema *.sql
|
||||||
|
|
||||||
|
recursive-include demo *.dh
|
||||||
|
recursive-include demo *.py
|
||||||
|
recursive-include demo *.sh
|
||||||
|
recursive-include docs *
|
||||||
|
recursive-include scripts *
|
||||||
|
recursive-include tests *.py
|
||||||
35
MAP.rst
Normal file
35
MAP.rst
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
Directory Structure
|
||||||
|
===================
|
||||||
|
|
||||||
|
Warning: this may be a bit stale...
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
.
|
||||||
|
├── cmdclient Basic CLI python Matrix client
|
||||||
|
├── demo Scripts for running standalone Matrix demos
|
||||||
|
├── docs All doc, including the draft Matrix API spec
|
||||||
|
│ ├── client-server The client-server Matrix API spec
|
||||||
|
│ ├── model Domain-specific elements of the Matrix API spec
|
||||||
|
│ ├── server-server The server-server model of the Matrix API spec
|
||||||
|
│ └── sphinx The internal API doc of the Synapse homeserver
|
||||||
|
├── experiments Early experiments of using Synapse's internal APIs
|
||||||
|
├── graph Visualisation of Matrix's distributed message store
|
||||||
|
├── synapse The reference Matrix homeserver implementation
|
||||||
|
│ ├── api Common building blocks for the APIs
|
||||||
|
│ │ ├── events Definition of state representation Events
|
||||||
|
│ │ └── streams Definition of streamable Event objects
|
||||||
|
│ ├── app The __main__ entry point for the homeserver
|
||||||
|
│ ├── crypto The PKI client/server used for secure federation
|
||||||
|
│ │ └── resource PKI helper objects (e.g. keys)
|
||||||
|
│ ├── federation Server-server state replication logic
|
||||||
|
│ ├── handlers The main business logic of the homeserver
|
||||||
|
│ ├── http Wrappers around Twisted's HTTP server & client
|
||||||
|
│ ├── rest Servlet-style RESTful API
|
||||||
|
│ ├── storage Persistence subsystem (currently only sqlite3)
|
||||||
|
│ │ └── schema sqlite persistence schema
|
||||||
|
│ └── util Synapse-specific utilities
|
||||||
|
├── tests Unit tests for the Synapse homeserver
|
||||||
|
└── webclient Basic AngularJS Matrix web client
|
||||||
|
|
||||||
|
|
||||||
580
README.rst
580
README.rst
@@ -1,250 +1,444 @@
|
|||||||
=========================================================================
|
Introduction
|
||||||
Synapse |support| |development| |documentation| |license| |pypi| |python|
|
============
|
||||||
=========================================================================
|
|
||||||
|
|
||||||
Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
|
Matrix is an ambitious new ecosystem for open federated Instant Messaging and
|
||||||
maintained by the Matrix.org Foundation. We began rapid development in 2014,
|
VoIP. The basics you need to know to get up and running are:
|
||||||
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
|
|
||||||
in earnest today.
|
|
||||||
|
|
||||||
Briefly, Matrix is an open standard for communications on the internet, supporting
|
- Everything in Matrix happens in a room. Rooms are distributed and do not
|
||||||
federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
|
exist on any single server. Rooms can be located using convenience aliases
|
||||||
Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
|
like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
|
||||||
<https://spec.matrix.org/>`_ describes the technical details.
|
|
||||||
|
|
||||||
.. contents::
|
- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
|
||||||
|
you will normally refer to yourself and others using a 3PID: email
|
||||||
|
address, phone number, etc rather than manipulating Matrix user IDs)
|
||||||
|
|
||||||
Installing and configuration
|
The overall architecture is::
|
||||||
============================
|
|
||||||
|
|
||||||
The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
client <----> homeserver <=====================> homeserver <----> client
|
||||||
`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
||||||
<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
|
||||||
|
|
||||||
.. _federation:
|
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
||||||
|
accessed by the web client at http://matrix.org/alpha or via an IRC bridge at
|
||||||
|
irc://irc.freenode.net/matrix.
|
||||||
|
|
||||||
Synapse has a variety of `config options
|
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
||||||
<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
is sufficiently stable to be run as an internet-facing service for real usage!
|
||||||
which can be used to customise its behaviour after installation.
|
|
||||||
There are additional details on how to `configure Synapse for federation here
|
|
||||||
<https://matrix-org.github.io/synapse/latest/federate.html>`_.
|
|
||||||
|
|
||||||
.. _reverse-proxy:
|
About Matrix
|
||||||
|
============
|
||||||
|
|
||||||
Using a reverse proxy with Synapse
|
Matrix specifies a set of pragmatic RESTful HTTP JSON APIs as an open standard,
|
||||||
----------------------------------
|
which handle:
|
||||||
|
|
||||||
It is recommended to put a reverse proxy such as
|
- Creating and managing fully distributed chat rooms with no
|
||||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
single points of control or failure
|
||||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
- Eventually-consistent cryptographically secure synchronisation of room
|
||||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
state across a global open network of federated servers and services
|
||||||
`HAProxy <https://www.haproxy.org/>`_ or
|
- Sending and receiving extensible messages in a room with (optional)
|
||||||
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
end-to-end encryption[1]
|
||||||
doing so is that it means that you can expose the default https port (443) to
|
- Inviting, joining, leaving, kicking, banning room members
|
||||||
Matrix clients without needing to run Synapse with root privileges.
|
- Managing user accounts (registration, login, logout)
|
||||||
For information on configuring one, see `the reverse proxy docs
|
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
||||||
<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
|
Facebook accounts to authenticate, identify and discover users on Matrix.
|
||||||
|
- Placing 1:1 VoIP and Video calls
|
||||||
|
|
||||||
Upgrading an existing Synapse
|
These APIs are intended to be implemented on a wide range of servers, services
|
||||||
-----------------------------
|
and clients, letting developers build messaging and VoIP functionality on top
|
||||||
|
of the entirely open Matrix ecosystem rather than using closed or proprietary
|
||||||
|
solutions. The hope is for Matrix to act as the building blocks for a new
|
||||||
|
generation of fully open and interoperable messaging and VoIP apps for the
|
||||||
|
internet.
|
||||||
|
|
||||||
The instructions for upgrading Synapse are in `the upgrade notes`_.
|
Synapse is a reference "homeserver" implementation of Matrix from the core
|
||||||
Please check these instructions as upgrading may require extra steps for some
|
development team at matrix.org, written in Python/Twisted for clarity and
|
||||||
versions of Synapse.
|
simplicity. It is intended to showcase the concept of Matrix and let folks see
|
||||||
|
the spec in the context of a codebase and let you run your own homeserver and
|
||||||
|
generally help bootstrap the ecosystem.
|
||||||
|
|
||||||
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
|
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||||
|
a Matrix homeserver which stores all their personal chat history and user
|
||||||
|
account information - much as a mail client connects through to an IMAP/SMTP
|
||||||
|
server. Just like email, you can either run your own Matrix homeserver and
|
||||||
|
control and own your own communications and history or use one hosted by
|
||||||
|
someone else (e.g. matrix.org) - there is no single point of control or
|
||||||
|
mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts, etc.
|
||||||
|
|
||||||
|
Synapse ships with two basic demo Matrix clients: webclient (a basic group chat
|
||||||
|
web client demo implemented in AngularJS) and cmdclient (a basic Python
|
||||||
|
command line utility which lets you easily see what the JSON APIs are up to).
|
||||||
|
|
||||||
|
Meanwhile, iOS and Android SDKs and clients are currently in development and available from:
|
||||||
|
|
||||||
|
- https://github.com/matrix-org/matrix-ios-sdk
|
||||||
|
- https://github.com/matrix-org/matrix-android-sdk
|
||||||
|
|
||||||
|
We'd like to invite you to join #matrix:matrix.org (via http://matrix.org/alpha), run a homeserver, take a look at the Matrix spec at
|
||||||
|
http://matrix.org/docs/spec, experiment with the APIs and the demo
|
||||||
|
clients, and report any bugs via http://matrix.org/jira.
|
||||||
|
|
||||||
|
Thanks for using Matrix!
|
||||||
|
|
||||||
|
[1] End-to-end encryption is currently in development
|
||||||
|
|
||||||
|
Homeserver Installation
|
||||||
|
=======================
|
||||||
|
|
||||||
|
System requirements:
|
||||||
|
- POSIX-compliant system (tested on Linux & OSX)
|
||||||
|
- Python 2.7
|
||||||
|
|
||||||
|
Synapse is written in python but some of the libraries is uses are written in
|
||||||
|
C. So before we can install synapse itself we need a working C compiler and the
|
||||||
|
header files for python C extensions.
|
||||||
|
|
||||||
|
Installing prerequisites on Ubuntu or Debian::
|
||||||
|
|
||||||
|
$ sudo apt-get install build-essential python2.7-dev libffi-dev \
|
||||||
|
python-pip python-setuptools sqlite3 \
|
||||||
|
libssl-dev python-virtualenv libjpeg-dev
|
||||||
|
|
||||||
|
Installing prerequisites on ArchLinux::
|
||||||
|
|
||||||
|
$ sudo pacman -S base-devel python2 python-pip \
|
||||||
|
python-setuptools python-virtualenv sqlite3
|
||||||
|
|
||||||
|
Installing prerequisites on Mac OS X::
|
||||||
|
|
||||||
|
$ xcode-select --install
|
||||||
|
$ sudo pip install virtualenv
|
||||||
|
|
||||||
|
To install the synapse homeserver run::
|
||||||
|
|
||||||
|
$ virtualenv ~/.synapse
|
||||||
|
$ source ~/.synapse/bin/activate
|
||||||
|
$ pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
|
||||||
|
|
||||||
|
This installs synapse, along with the libraries it uses, into a virtual
|
||||||
|
environment under ``~/.synapse``.
|
||||||
|
|
||||||
|
To set up your homeserver, run (in your virtualenv, as before)::
|
||||||
|
|
||||||
|
$ cd ~/.synapse
|
||||||
|
$ python -m synapse.app.homeserver \
|
||||||
|
--server-name machine.my.domain.name \
|
||||||
|
--config-path homeserver.yaml \
|
||||||
|
--generate-config
|
||||||
|
|
||||||
|
Substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
|
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||||
|
a TURN server. See docs/turn-howto.rst for details.
|
||||||
|
|
||||||
|
Troubleshooting Installation
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
Synapse requires pip 1.7 or later, so if your OS provides too old a version and
|
||||||
|
you get errors about ``error: no such option: --process-dependency-links`` you
|
||||||
|
may need to manually upgrade it::
|
||||||
|
|
||||||
|
$ sudo pip install --upgrade pip
|
||||||
|
|
||||||
|
If pip crashes mid-installation for reason (e.g. lost terminal), pip may
|
||||||
|
refuse to run until you remove the temporary installation directory it
|
||||||
|
created. To reset the installation::
|
||||||
|
|
||||||
|
$ rm -rf /tmp/pip_install_matrix
|
||||||
|
|
||||||
|
pip seems to leak *lots* of memory during installation. For instance, a Linux
|
||||||
|
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||||
|
happens, you will have to individually install the dependencies which are
|
||||||
|
failing, e.g.::
|
||||||
|
|
||||||
|
$ pip install twisted
|
||||||
|
|
||||||
|
On OSX, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
|
||||||
|
will need to export CFLAGS=-Qunused-arguments.
|
||||||
|
|
||||||
|
ArchLinux
|
||||||
|
---------
|
||||||
|
|
||||||
|
Installation on ArchLinux may encounter a few hiccups as Arch defaults to
|
||||||
|
python 3, but synapse currently assumes python 2.7 by default.
|
||||||
|
|
||||||
|
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
|
||||||
|
|
||||||
|
$ sudo pip2.7 install --upgrade pip
|
||||||
|
|
||||||
|
You also may need to explicitly specify python 2.7 again during the install
|
||||||
|
request::
|
||||||
|
|
||||||
|
$ pip2.7 install --process-dependency-links \
|
||||||
|
https://github.com/matrix-org/synapse/tarball/master
|
||||||
|
|
||||||
|
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
|
||||||
|
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||||
|
compile it under the right architecture. (This should not be needed if
|
||||||
|
installing under virtualenv)::
|
||||||
|
|
||||||
|
$ sudo pip2.7 uninstall py-bcrypt
|
||||||
|
$ sudo pip2.7 install py-bcrypt
|
||||||
|
|
||||||
|
During setup of homeserver you need to call python2.7 directly again::
|
||||||
|
|
||||||
|
$ cd ~/.synapse
|
||||||
|
$ python2.7 -m synapse.app.homeserver \
|
||||||
|
--server-name machine.my.domain.name \
|
||||||
|
--config-path homeserver.yaml \
|
||||||
|
--generate-config
|
||||||
|
|
||||||
|
...substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
|
Windows Install
|
||||||
|
---------------
|
||||||
|
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
||||||
|
|
||||||
|
- gcc
|
||||||
|
- git
|
||||||
|
- libffi-devel
|
||||||
|
- openssl (and openssl-devel, python-openssl)
|
||||||
|
- python
|
||||||
|
- python-setuptools
|
||||||
|
|
||||||
|
The content repository requires additional packages and will be unable to process
|
||||||
|
uploads without them:
|
||||||
|
- libjpeg8
|
||||||
|
- libjpeg8-devel
|
||||||
|
- zlib
|
||||||
|
If you choose to install Synapse without these packages, you will need to reinstall
|
||||||
|
``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
|
||||||
|
pillow --user``
|
||||||
|
|
||||||
|
Troubleshooting:
|
||||||
|
|
||||||
|
- You may need to upgrade ``setuptools`` to get this to work correctly:
|
||||||
|
``pip install setuptools --upgrade``.
|
||||||
|
- You may encounter errors indicating that ``ffi.h`` is missing, even with
|
||||||
|
``libffi-devel`` installed. If you do, copy the ``.h`` files:
|
||||||
|
``cp /usr/lib/libffi-3.0.13/include/*.h /usr/include``
|
||||||
|
- You may need to install libsodium from source in order to install PyNacl. If
|
||||||
|
you do, you may need to create a symlink to ``libsodium.a`` so ``ld`` can find
|
||||||
|
it: ``ln -s /usr/local/lib/libsodium.a /usr/lib/libsodium.a``
|
||||||
|
|
||||||
|
Running Your Homeserver
|
||||||
|
=======================
|
||||||
|
|
||||||
|
To actually run your new homeserver, pick a working directory for Synapse to run
|
||||||
|
(e.g. ``~/.synapse``), and::
|
||||||
|
|
||||||
|
$ cd ~/.synapse
|
||||||
|
$ source ./bin/activate
|
||||||
|
$ synctl start
|
||||||
|
|
||||||
|
Troubleshooting Running
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
|
||||||
|
to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
|
||||||
|
encryption and digital signatures.
|
||||||
|
Unfortunately PyNACL currently has a few issues
|
||||||
|
(https://github.com/pyca/pynacl/issues/53) and
|
||||||
|
(https://github.com/pyca/pynacl/issues/79) that mean it may not install
|
||||||
|
correctly, causing all tests to fail with errors about missing "sodium.h". To
|
||||||
|
fix try re-installing from PyPI or directly from
|
||||||
|
(https://github.com/pyca/pynacl)::
|
||||||
|
|
||||||
|
$ # Install from PyPI
|
||||||
|
$ pip install --user --upgrade --force pynacl
|
||||||
|
$ # Install from github
|
||||||
|
$ pip install --user https://github.com/pyca/pynacl/tarball/master
|
||||||
|
|
||||||
|
ArchLinux
|
||||||
|
---------
|
||||||
|
|
||||||
|
If running `$ synctl start` fails with 'returned non-zero exit status 1', you will need to explicitly call Python2.7 - either running as::
|
||||||
|
|
||||||
|
$ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid
|
||||||
|
|
||||||
|
...or by editing synctl with the correct python executable.
|
||||||
|
|
||||||
|
Homeserver Development
|
||||||
|
======================
|
||||||
|
|
||||||
|
To check out a homeserver for development, clone the git repo into a working
|
||||||
|
directory of your choice::
|
||||||
|
|
||||||
|
$ git clone https://github.com/matrix-org/synapse.git
|
||||||
|
$ cd synapse
|
||||||
|
|
||||||
|
The homeserver has a number of external dependencies, that are easiest
|
||||||
|
to install using pip and a virtualenv::
|
||||||
|
|
||||||
|
$ virtualenv env
|
||||||
|
$ source env/bin/activate
|
||||||
|
$ python synapse/python_dependencies.py | xargs -n1 pip install
|
||||||
|
$ pip install setuptools_trial mock
|
||||||
|
|
||||||
|
This will run a process of downloading and installing all the needed
|
||||||
|
dependencies into a virtual env.
|
||||||
|
|
||||||
|
Once this is done, you may wish to run the homeserver's unit tests, to
|
||||||
|
check that everything is installed as it should be::
|
||||||
|
|
||||||
|
$ python setup.py test
|
||||||
|
|
||||||
|
This should end with a 'PASSED' result::
|
||||||
|
|
||||||
|
Ran 143 tests in 0.601s
|
||||||
|
|
||||||
|
PASSED (successes=143)
|
||||||
|
|
||||||
|
|
||||||
Platform dependencies
|
Upgrading an existing homeserver
|
||||||
---------------------
|
================================
|
||||||
|
|
||||||
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
IMPORTANT: Before upgrading an existing homeserver to a new version, please
|
||||||
and aims to follow supported upstream versions. See the
|
refer to UPGRADE.rst for any additional instructions.
|
||||||
`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
|
|
||||||
for more details.
|
Otherwise, simply re-install the new codebase over the current one - e.g.
|
||||||
|
by ``pip install --process-dependency-links
|
||||||
|
https://github.com/matrix-org/synapse/tarball/master``
|
||||||
|
if using pip, or by ``git pull`` if running off a git working copy.
|
||||||
|
|
||||||
|
|
||||||
Security note
|
Setting up Federation
|
||||||
-------------
|
=====================
|
||||||
|
|
||||||
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
In order for other homeservers to send messages to your server, it will need to
|
||||||
repository endpoints`_.
|
be publicly visible on the internet, and they will need to know its host name.
|
||||||
|
You have two choices here, which will influence the form of your Matrix user
|
||||||
|
IDs:
|
||||||
|
|
||||||
.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
|
1) Use the machine's own hostname as available on public DNS in the form of
|
||||||
|
its A or AAAA records. This is easier to set up initially, perhaps for
|
||||||
|
testing, but lacks the flexibility of SRV.
|
||||||
|
|
||||||
Whilst we make a reasonable effort to mitigate against XSS attacks (for
|
2) Set up a SRV record for your domain name. This requires you create a SRV
|
||||||
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
|
record in DNS, but gives the flexibility to run the server on your own
|
||||||
domain hosting other web applications. This especially applies to sharing
|
choice of TCP port, on a machine that might not be the same name as the
|
||||||
the domain with Matrix web clients and other sensitive applications like
|
domain name.
|
||||||
webmail. See
|
|
||||||
https://developer.github.com/changes/2014-04-25-user-content-security for more
|
|
||||||
information.
|
|
||||||
|
|
||||||
.. _CSP: https://github.com/matrix-org/synapse/pull/1021
|
For the first form, simply pass the required hostname (of the machine) as the
|
||||||
|
--server-name parameter::
|
||||||
|
|
||||||
Ideally, the homeserver should not simply be on a different subdomain, but on
|
$ python -m synapse.app.homeserver \
|
||||||
a completely different `registered domain`_ (also known as top-level site or
|
--server-name machine.my.domain.name \
|
||||||
eTLD+1). This is because `some attacks`_ are still possible as long as the two
|
--config-path homeserver.yaml \
|
||||||
applications share the same registered domain.
|
--generate-config
|
||||||
|
$ python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||||
|
|
||||||
.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
|
Alternatively, you can run ``synctl start`` to guide you through the process.
|
||||||
|
|
||||||
.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
|
For the second form, first create your SRV record and publish it in DNS. This
|
||||||
|
needs to be named _matrix._tcp.YOURDOMAIN, and point at at least one hostname
|
||||||
|
and port where the server is running. (At the current time synapse does not
|
||||||
|
support clustering multiple servers into a single logical homeserver). The DNS
|
||||||
|
record would then look something like::
|
||||||
|
|
||||||
To illustrate this with an example, if your Element Web or other sensitive web
|
    $ dig -t srv _matrix._tcp.machine.my.domain.name
|
||||||
application is hosted on ``A.example1.com``, you should ideally host Synapse on
|
_matrix._tcp IN SRV 10 0 8448 machine.my.domain.name.
|
||||||
``example2.com``. Some amount of protection is offered by hosting on
|
|
||||||
``B.example1.com`` instead, so this is also acceptable in some scenarios.
|
|
||||||
However, you should *not* host your Synapse on ``A.example1.com``.
|
|
||||||
|
|
||||||
Note that all of the above refers exclusively to the domain used in Synapse's
|
|
||||||
``public_baseurl`` setting. In particular, it has no bearing on the domain
|
|
||||||
mentioned in MXIDs hosted on that server.
|
|
||||||
|
|
||||||
Following this advice ensures that even if an XSS is found in Synapse, the
|
|
||||||
impact to other applications will be minimal.
|
|
||||||
|
|
||||||
|
|
||||||
Testing a new installation
|
At this point, you should then run the homeserver with the hostname of this
|
||||||
==========================
|
SRV record, as that is the name other machines will expect it to have::
|
||||||
|
|
||||||
The easiest way to try out your new Synapse installation is by connecting to it
|
$ python -m synapse.app.homeserver \
|
||||||
from a web client.
|
--server-name YOURDOMAIN \
|
||||||
|
--bind-port 8448 \
|
||||||
|
--config-path homeserver.yaml \
|
||||||
|
--generate-config
|
||||||
|
$ python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||||
|
|
||||||
Unless you are running a test instance of Synapse on your local machine, in
|
|
||||||
general, you will need to enable TLS support before you can successfully
|
|
||||||
connect from a client: see
|
|
||||||
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
|
||||||
|
|
||||||
An easy way to get started is to login or register via Element at
|
You may additionally want to pass one or more "-v" options, in order to
|
||||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
increase the verbosity of logging output; at least for initial testing.
|
||||||
You will need to change the server you are logging into from ``matrix.org``
|
|
||||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
|
||||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
|
||||||
If you prefer to use another client, refer to our
|
|
||||||
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
|
||||||
|
|
||||||
If all goes well you should at least be able to log in, create a room, and
|
For the initial alpha release, the homeserver is not speaking TLS for
|
||||||
start sending messages.
|
either client-server or server-server traffic for ease of debugging. We have
|
||||||
|
also not spent any time yet getting the homeserver to run behind loadbalancers.
|
||||||
|
|
||||||
.. _`client-user-reg`:
|
Running a Demo Federation of Homeservers
|
||||||
|
----------------------------------------
|
||||||
|
|
||||||
Registering a new user from a client
|
If you want to get up and running quickly with a trio of homeservers in a
|
||||||
------------------------------------
|
private federation (``localhost:8080``, ``localhost:8081`` and
|
||||||
|
``localhost:8082``) which you can then access through the webclient running at
|
||||||
|
http://localhost:8080. Simply run::
|
||||||
|
|
||||||
By default, registration of new users via Matrix clients is disabled. To enable
|
$ demo/start.sh
|
||||||
it:
|
|
||||||
|
This is mainly useful just for development purposes.
|
||||||
|
|
||||||
1. In the
|
Running The Demo Web Client
|
||||||
`registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
|
||||||
set ``enable_registration: true`` in ``homeserver.yaml``.
|
|
||||||
2. Then **either**:
|
|
||||||
|
|
||||||
a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
|
||||||
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
|
||||||
|
|
||||||
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
|
||||||
the public internet. Without it, anyone can freely register accounts on your homeserver.
|
|
||||||
This can be exploited by attackers to create spambots targeting the rest of the Matrix
|
|
||||||
federation.
|
|
||||||
|
|
||||||
Your new user name will be formed partly from the ``server_name``, and partly
|
|
||||||
from a localpart you specify when you create the account. Your name will take
|
|
||||||
the form of::
|
|
||||||
|
|
||||||
@localpart:my.domain.name
|
|
||||||
|
|
||||||
(pronounced "at localpart on my dot domain dot name").
|
|
||||||
|
|
||||||
As when logging in, you will need to specify a "Custom server". Specify your
|
|
||||||
desired ``localpart`` in the 'User name' box.
|
|
||||||
|
|
||||||
Troubleshooting and support
|
|
||||||
===========================
|
===========================
|
||||||
|
|
||||||
The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
The homeserver runs a web client by default at https://localhost:8448/.
|
||||||
includes tips on dealing with some common problems. For more details, see
|
|
||||||
`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.
|
|
||||||
|
|
||||||
For additional support installing or managing Synapse, please ask in the community
|
If this is the first time you have used the client from that browser (it uses
|
||||||
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
|
HTML5 local storage to remember its config), you will need to log in to your
|
||||||
issues for support requests, only for bug reports and feature requests.
|
account. If you don't yet have an account, because you've just started the
|
||||||
|
homeserver for the first time, then you'll need to register one.
|
||||||
|
|
||||||
.. |room| replace:: ``#synapse:matrix.org``
|
|
||||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
.. |docs| replace:: ``docs``
|
Registering A New Account
|
||||||
.. _docs: docs
|
-------------------------
|
||||||
|
|
||||||
|
Your new user name will be formed partly from the hostname your server is
|
||||||
|
running as, and partly from a localpart you specify when you create the
|
||||||
|
account. Your name will take the form of::
|
||||||
|
|
||||||
|
@localpart:my.domain.here
|
||||||
|
(pronounced "at localpart on my dot domain dot here")
|
||||||
|
|
||||||
|
Specify your desired localpart in the topmost box of the "Register for an
|
||||||
|
account" form, and click the "Register" button. Hostnames can contain ports if
|
||||||
|
required due to lack of SRV records (e.g. @matthew:localhost:8448 on an
|
||||||
|
internal synapse sandbox running on localhost)
|
||||||
|
|
||||||
|
|
||||||
|
Logging In To An Existing Account
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
Just enter the ``@localpart:my.domain.here`` Matrix user ID and password into
|
||||||
|
the form and click the Login button.
|
||||||
|
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
|
|
||||||
Identity servers have the job of mapping email addresses and other 3rd Party
|
The job of authenticating 3PIDs and tracking which 3PIDs are associated with a
|
||||||
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
|
given Matrix user is very security-sensitive, as there is obvious risk of spam
|
||||||
before creating that mapping.
|
if it is too easy to sign up for Matrix accounts or harvest 3PID data.
|
||||||
|
Meanwhile the job of publishing the end-to-end encryption public keys for
|
||||||
|
Matrix users is also very security-sensitive for similar reasons.
|
||||||
|
|
||||||
**They are not where accounts or credentials are stored - these live on home
|
Therefore the role of managing trusted identity in the Matrix ecosystem is
|
||||||
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
|
farmed out to a cluster of known trusted ecosystem partners, who run 'Matrix
|
||||||
|
Identity Servers' such as ``sydent``, whose role is purely to authenticate and
|
||||||
|
track 3PID logins and publish end-user public keys.
|
||||||
|
|
||||||
This process is very security-sensitive, as there is obvious risk of spam if it
|
It's currently early days for identity servers as Matrix is not yet using 3PIDs
|
||||||
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
|
as the primary means of identity and E2E encryption is not complete. As such,
|
||||||
term, we hope to create a decentralised system to manage it (`matrix-doc #712
|
we are running a single identity server (http://matrix.org:8090) at the current
|
||||||
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
|
time.
|
||||||
the role of managing trusted identity in the Matrix ecosystem is farmed out to
|
|
||||||
a cluster of known trusted ecosystem partners, who run 'Matrix Identity
|
|
||||||
Servers' such as `Sydent <https://github.com/matrix-org/sydent>`_, whose role
|
|
||||||
is purely to authenticate and track 3PID logins and publish end-user public
|
|
||||||
keys.
|
|
||||||
|
|
||||||
You can host your own copy of Sydent, but this will prevent you reaching other
|
|
||||||
users in the Matrix ecosystem via their email address, and prevent them finding
|
|
||||||
you. We therefore recommend that you use one of the centralised identity servers
|
|
||||||
at ``https://matrix.org`` or ``https://vector.im`` for now.
|
|
||||||
|
|
||||||
To reiterate: the Identity server will only be used if you choose to associate
|
|
||||||
an email address with your account, or send an invite to another user via their
|
|
||||||
email address.
|
|
||||||
|
|
||||||
|
|
||||||
Development
|
Where's the spec?!
|
||||||
===========
|
==================
|
||||||
|
|
||||||
We welcome contributions to Synapse from the community!
|
The source of the matrix spec lives at https://github.com/matrix-org/matrix-doc.
|
||||||
The best place to get started is our
|
A recent HTML snapshot of this lives at http://matrix.org/docs/spec
|
||||||
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
|
||||||
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
|
||||||
|
|
||||||
information for Synapse developers as well as Synapse administrators.
|
|
||||||
Developers might be particularly interested in:
|
|
||||||
|
|
||||||
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
|
||||||
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
|
|
||||||
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.
|
|
||||||
|
|
||||||
Alongside all that, join our developer community on Matrix:
|
|
||||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
|
||||||
|
|
||||||
|
|
||||||
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
|
Building Internal API Documentation
|
||||||
:alt: (get support on #synapse:matrix.org)
|
===================================
|
||||||
:target: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
.. |development| image:: https://img.shields.io/matrix/synapse-dev:matrix.org?label=development&logo=matrix
|
Before building internal API documentation install sphinx and
|
||||||
:alt: (discuss development on #synapse-dev:matrix.org)
|
sphinxcontrib-napoleon::
|
||||||
:target: https://matrix.to/#/#synapse-dev:matrix.org
|
|
||||||
|
|
||||||
.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
|
$ pip install sphinx
|
||||||
:alt: (Rendered documentation on GitHub Pages)
|
$ pip install sphinxcontrib-napoleon
|
||||||
:target: https://matrix-org.github.io/synapse/latest/
|
|
||||||
|
|
||||||
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
Building internal API documentation::
|
||||||
:alt: (check license in LICENSE file)
|
|
||||||
:target: LICENSE
|
|
||||||
|
|
||||||
.. |pypi| image:: https://img.shields.io/pypi/v/matrix-synapse
|
$ python setup.py build_sphinx
|
||||||
:alt: (latest version released on PyPi)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|
||||||
.. |python| image:: https://img.shields.io/pypi/pyversions/matrix-synapse
|
|
||||||
:alt: (supported python versions)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|||||||
191
UPGRADE.rst
191
UPGRADE.rst
@@ -1,7 +1,188 @@
|
|||||||
Upgrading Synapse
|
Upgrading to v0.8.0
|
||||||
=================
|
===================
|
||||||
|
|
||||||
This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrade>`_.
|
Servers which use captchas will need to add their public key to::
|
||||||
Please update your links.
|
|
||||||
|
|
||||||
The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
|
static/client/register/register_config.js
|
||||||
|
|
||||||
|
window.matrixRegistrationConfig = {
|
||||||
|
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
||||||
|
};
|
||||||
|
|
||||||
|
This is required in order to support registration fallback (typically used on
|
||||||
|
mobile devices).
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.7.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
New dependencies are:
|
||||||
|
|
||||||
|
- pydenticon
|
||||||
|
- simplejson
|
||||||
|
- syutil
|
||||||
|
- matrix-angular-sdk
|
||||||
|
|
||||||
|
To pull in these dependencies in a virtual env, run::
|
||||||
|
|
||||||
|
python synapse/python_dependencies.py | xargs -n 1 pip install
|
||||||
|
|
||||||
|
Upgrading to v0.6.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
To pull in new dependencies, run::
|
||||||
|
|
||||||
|
python setup.py develop --user
|
||||||
|
|
||||||
|
This update includes a change to the database schema. To upgrade you first need
|
||||||
|
to upgrade the database by running::
|
||||||
|
|
||||||
|
python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>
|
||||||
|
|
||||||
|
Where `<db>` is the location of the database, `<server_name>` is the
|
||||||
|
server name as specified in the synapse configuration, and `<signing_key>` is
|
||||||
|
the location of the signing key as specified in the synapse configuration.
|
||||||
|
|
||||||
|
This may take some time to complete. Failures of signatures and content hashes
|
||||||
|
can safely be ignored.
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.5.1
|
||||||
|
===================
|
||||||
|
|
||||||
|
Depending on precisely when you installed v0.5.0 you may have ended up with
|
||||||
|
a stale release of the reference matrix webclient installed as a python module.
|
||||||
|
To uninstall it and ensure you are depending on the latest module, please run::
|
||||||
|
|
||||||
|
$ pip uninstall syweb
|
||||||
|
|
||||||
|
Upgrading to v0.5.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
The webclient has been split out into a separate repository/package in this
|
||||||
|
release. Before you restart your homeserver you will need to pull in the
|
||||||
|
webclient package by running::
|
||||||
|
|
||||||
|
python setup.py develop --user
|
||||||
|
|
||||||
|
This release completely changes the database schema and so requires upgrading
|
||||||
|
it before starting the new version of the homeserver.
|
||||||
|
|
||||||
|
The script "database-prepare-for-0.5.0.sh" should be used to upgrade the
|
||||||
|
database. This will save all user information, such as logins and profiles,
|
||||||
|
but will otherwise purge the database. This includes messages, which
|
||||||
|
rooms the home server was a member of and room alias mappings.
|
||||||
|
|
||||||
|
If you would like to keep your history, please take a copy of your database
|
||||||
|
file and ask for help in #matrix:matrix.org. The upgrade process is,
|
||||||
|
unfortunately, non trivial and requires human intervention to resolve any
|
||||||
|
resulting conflicts during the upgrade process.
|
||||||
|
|
||||||
|
Before running the command the homeserver should be first completely
|
||||||
|
shutdown. To run it, simply specify the location of the database, e.g.:
|
||||||
|
|
||||||
|
./scripts/database-prepare-for-0.5.0.sh "homeserver.db"
|
||||||
|
|
||||||
|
Once this has successfully completed it will be safe to restart the
|
||||||
|
homeserver. You may notice that the homeserver takes a few seconds longer to
|
||||||
|
restart than usual as it reinitializes the database.
|
||||||
|
|
||||||
|
On startup of the new version, users can either rejoin remote rooms using room
|
||||||
|
aliases or by being reinvited. Alternatively, if any other homeserver sends a
|
||||||
|
message to a room that the homeserver was previously in, the local HS will
|
||||||
|
automatically rejoin the room.
|
||||||
|
|
||||||
|
Upgrading to v0.4.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
This release needs an updated syutil version. Run::
|
||||||
|
|
||||||
|
python setup.py develop
|
||||||
|
|
||||||
|
You will also need to upgrade your configuration as the signing key format has
|
||||||
|
changed. Run::
|
||||||
|
|
||||||
|
python -m synapse.app.homeserver --config-path <CONFIG> --generate-config
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.3.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
This registration API now closely matches the login API. This introduces a bit
|
||||||
|
more backwards and forwards between the HS and the client, but this improves
|
||||||
|
the overall flexibility of the API. You can now GET on /register to retrieve a list
|
||||||
|
of valid registration flows. Upon choosing one, they are submitted in the same
|
||||||
|
way as login, e.g::
|
||||||
|
|
||||||
|
{
|
||||||
|
type: m.login.password,
|
||||||
|
user: foo,
|
||||||
|
password: bar
|
||||||
|
}
|
||||||
|
|
||||||
|
The default HS supports 2 flows, with and without Identity Server email
|
||||||
|
authentication. Enabling captcha on the HS will add in an extra step to all
|
||||||
|
flows: ``m.login.recaptcha`` which must be completed before you can transition
|
||||||
|
to the next stage. There is a new login type: ``m.login.email.identity`` which
|
||||||
|
contains the ``threepidCreds`` key which were previously sent in the original
|
||||||
|
register request. For more information on this, see the specification.
|
||||||
|
|
||||||
|
Web Client
|
||||||
|
----------
|
||||||
|
|
||||||
|
The VoIP specification has changed between v0.2.0 and v0.3.0. Users should
|
||||||
|
refresh any browser tabs to get the latest web client code. Users on
|
||||||
|
v0.2.0 of the web client will not be able to call those on v0.3.0 and
|
||||||
|
vice versa.
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.2.0
|
||||||
|
===================
|
||||||
|
|
||||||
|
The home server now requires setting up of SSL config before it can run. To
|
||||||
|
automatically generate default config use::
|
||||||
|
|
||||||
|
$ python synapse/app/homeserver.py \
|
||||||
|
--server-name machine.my.domain.name \
|
||||||
|
--bind-port 8448 \
|
||||||
|
--config-path homeserver.config \
|
||||||
|
--generate-config
|
||||||
|
|
||||||
|
This config can be edited if desired, for example to specify a different SSL
|
||||||
|
certificate to use. Once done you can run the home server using::
|
||||||
|
|
||||||
|
$ python synapse/app/homeserver.py --config-path homeserver.config
|
||||||
|
|
||||||
|
See the README.rst for more information.
|
||||||
|
|
||||||
|
Also note that some config options have been renamed, including:
|
||||||
|
|
||||||
|
- "host" to "server-name"
|
||||||
|
- "database" to "database-path"
|
||||||
|
- "port" to "bind-port" and "unsecure-port"
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.0.1
|
||||||
|
===================
|
||||||
|
|
||||||
|
This release completely changes the database schema and so requires upgrading
|
||||||
|
it before starting the new version of the homeserver.
|
||||||
|
|
||||||
|
The script "database-prepare-for-0.0.1.sh" should be used to upgrade the
|
||||||
|
database. This will save all user information, such as logins and profiles,
|
||||||
|
but will otherwise purge the database. This includes messages, which
|
||||||
|
rooms the home server was a member of and room alias mappings.
|
||||||
|
|
||||||
|
Before running the command the homeserver should be first completely
|
||||||
|
shutdown. To run it, simply specify the location of the database, e.g.:
|
||||||
|
|
||||||
|
./scripts/database-prepare-for-0.0.1.sh "homeserver.db"
|
||||||
|
|
||||||
|
Once this has successfully completed it will be safe to restart the
|
||||||
|
homeserver. You may notice that the homeserver takes a few seconds longer to
|
||||||
|
restart than usual as it reinitializes the database.
|
||||||
|
|
||||||
|
On startup of the new version, users can either rejoin remote rooms using room
|
||||||
|
aliases or by being reinvited. Alternatively, if any other homeserver sends a
|
||||||
|
message to a room that the homeserver was previously in, the local HS will
|
||||||
|
automatically rejoin the room.
|
||||||
|
|||||||
39
book.toml
39
book.toml
@@ -1,39 +0,0 @@
|
|||||||
# Documentation for possible options in this file is at
|
|
||||||
# https://rust-lang.github.io/mdBook/format/config.html
|
|
||||||
[book]
|
|
||||||
title = "Synapse"
|
|
||||||
authors = ["The Matrix.org Foundation C.I.C."]
|
|
||||||
language = "en"
|
|
||||||
multilingual = false
|
|
||||||
|
|
||||||
# The directory that documentation files are stored in
|
|
||||||
src = "docs"
|
|
||||||
|
|
||||||
[build]
|
|
||||||
# Prevent markdown pages from being automatically generated when they're
|
|
||||||
# linked to in SUMMARY.md
|
|
||||||
create-missing = false
|
|
||||||
|
|
||||||
[output.html]
|
|
||||||
# The URL visitors will be directed to when they try to edit a page
|
|
||||||
edit-url-template = "https://github.com/matrix-org/synapse/edit/develop/{path}"
|
|
||||||
|
|
||||||
# Remove the numbers that appear before each item in the sidebar, as they can
|
|
||||||
# get quite messy as we nest deeper
|
|
||||||
no-section-label = true
|
|
||||||
|
|
||||||
# The source code URL of the repository
|
|
||||||
git-repository-url = "https://github.com/matrix-org/synapse"
|
|
||||||
|
|
||||||
# The path that the docs are hosted on
|
|
||||||
site-url = "/synapse/"
|
|
||||||
|
|
||||||
# Additional HTML, JS, CSS that's injected into each page of the book.
|
|
||||||
# More information available in docs/website_files/README.md
|
|
||||||
additional-css = [
|
|
||||||
"docs/website_files/table-of-contents.css",
|
|
||||||
"docs/website_files/remove-nav-buttons.css",
|
|
||||||
"docs/website_files/indent-section-headers.css",
|
|
||||||
]
|
|
||||||
additional-js = ["docs/website_files/table-of-contents.js"]
|
|
||||||
theme = "docs/website_files/theme"
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
# A build script for poetry that adds the rust extension.
|
|
||||||
|
|
||||||
import os
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
from setuptools_rust import Binding, RustExtension
|
|
||||||
|
|
||||||
|
|
||||||
def build(setup_kwargs: Dict[str, Any]) -> None:
|
|
||||||
original_project_dir = os.path.dirname(os.path.realpath(__file__))
|
|
||||||
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
|
|
||||||
|
|
||||||
extension = RustExtension(
|
|
||||||
target="synapse.synapse_rust",
|
|
||||||
path=cargo_toml_path,
|
|
||||||
binding=Binding.PyO3,
|
|
||||||
py_limited_api=True,
|
|
||||||
# We force always building in release mode, as we can't tell the
|
|
||||||
# difference between using `poetry` in development vs production.
|
|
||||||
debug=False,
|
|
||||||
)
|
|
||||||
setup_kwargs.setdefault("rust_extensions", []).append(extension)
|
|
||||||
setup_kwargs["zip_safe"] = False
|
|
||||||
1
changelog.d/.gitignore
vendored
1
changelog.d/.gitignore
vendored
@@ -1 +0,0 @@
|
|||||||
!.gitignore
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
Community Contributions
|
|
||||||
=======================
|
|
||||||
|
|
||||||
Everything in this directory are projects submitted by the community that may be useful
|
|
||||||
to others. As such, the project maintainers cannot guarantee support, stability
|
|
||||||
or backwards compatibility of these projects.
|
|
||||||
|
|
||||||
Files in this directory should *not* be relied on directly, as they may not
|
|
||||||
continue to work or exist in future. If you wish to use any of these files then
|
|
||||||
they should be copied to avoid them breaking from underneath you.
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -15,8 +15,11 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
""" Starts a synapse client console. """
|
""" Starts a synapse client console. """
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer, threads
|
||||||
|
from http import TwistedHttpClient
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import binascii
|
|
||||||
import cmd
|
import cmd
|
||||||
import getpass
|
import getpass
|
||||||
import json
|
import json
|
||||||
@@ -24,23 +27,21 @@ import shlex
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import urllib
|
import urllib
|
||||||
from http import TwistedHttpClient
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
import urlparse
|
import urlparse
|
||||||
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
|
||||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor, threads
|
import nacl.signing
|
||||||
|
import nacl.encoding
|
||||||
|
|
||||||
|
from syutil.crypto.jsonsign import verify_signed_json, SignatureVerifyException
|
||||||
|
|
||||||
CONFIG_JSON = "cmdclient_config.json"
|
CONFIG_JSON = "cmdclient_config.json"
|
||||||
|
|
||||||
# TODO: The concept of trusted identity servers has been deprecated. This option and checks
|
TRUSTED_ID_SERVERS = [
|
||||||
# should be removed
|
'localhost:8001'
|
||||||
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
]
|
||||||
|
|
||||||
|
|
||||||
class SynapseCmd(cmd.Cmd):
|
class SynapseCmd(cmd.Cmd):
|
||||||
|
|
||||||
"""Basic synapse command-line processor.
|
"""Basic synapse command-line processor.
|
||||||
|
|
||||||
This processes commands from the user and calls the relevant HTTP methods.
|
This processes commands from the user and calls the relevant HTTP methods.
|
||||||
@@ -57,7 +58,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"token": token,
|
"token": token,
|
||||||
"verbose": "on",
|
"verbose": "on",
|
||||||
"complete_usernames": "on",
|
"complete_usernames": "on",
|
||||||
"send_delivery_receipts": "on",
|
"send_delivery_receipts": "on"
|
||||||
}
|
}
|
||||||
self.path_prefix = "/_matrix/client/api/v1"
|
self.path_prefix = "/_matrix/client/api/v1"
|
||||||
self.event_stream_token = "END"
|
self.event_stream_token = "END"
|
||||||
@@ -92,7 +93,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
return self.config["user"].split(":")[1]
|
return self.config["user"].split(":")[1]
|
||||||
|
|
||||||
def do_config(self, line):
|
def do_config(self, line):
|
||||||
"""Show the config for this client: "config"
|
""" Show the config for this client: "config"
|
||||||
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
||||||
Config variables:
|
Config variables:
|
||||||
user: The username to auth with.
|
user: The username to auth with.
|
||||||
@@ -108,7 +109,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
|
by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
|
||||||
"""
|
"""
|
||||||
if len(line) == 0:
|
if len(line) == 0:
|
||||||
print(json.dumps(self.config, indent=4))
|
print json.dumps(self.config, indent=4)
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -118,11 +119,12 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
config_rules = [ # key, valid_values
|
config_rules = [ # key, valid_values
|
||||||
("verbose", ["on", "off"]),
|
("verbose", ["on", "off"]),
|
||||||
("complete_usernames", ["on", "off"]),
|
("complete_usernames", ["on", "off"]),
|
||||||
("send_delivery_receipts", ["on", "off"]),
|
("send_delivery_receipts", ["on", "off"])
|
||||||
]
|
]
|
||||||
for key, valid_vals in config_rules:
|
for key, valid_vals in config_rules:
|
||||||
if key == args["key"] and args["val"] not in valid_vals:
|
if key == args["key"] and args["val"] not in valid_vals:
|
||||||
print("%s value must be one of %s" % (args["key"], valid_vals))
|
print "%s value must be one of %s" % (args["key"],
|
||||||
|
valid_vals)
|
||||||
return
|
return
|
||||||
|
|
||||||
# toggle the http client verbosity
|
# toggle the http client verbosity
|
||||||
@@ -131,11 +133,11 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
# assign the new config
|
# assign the new config
|
||||||
self.config[args["key"]] = args["val"]
|
self.config[args["key"]] = args["val"]
|
||||||
print(json.dumps(self.config, indent=4))
|
print json.dumps(self.config, indent=4)
|
||||||
|
|
||||||
save_config(self.config)
|
save_config(self.config)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
def do_register(self, line):
|
def do_register(self, line):
|
||||||
"""Registers for a new account: "register <userid> <noupdate>"
|
"""Registers for a new account: "register <userid> <noupdate>"
|
||||||
@@ -151,32 +153,33 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
pwd = getpass.getpass("Type a password for this user: ")
|
pwd = getpass.getpass("Type a password for this user: ")
|
||||||
pwd2 = getpass.getpass("Retype the password: ")
|
pwd2 = getpass.getpass("Retype the password: ")
|
||||||
if pwd != pwd2 or len(pwd) == 0:
|
if pwd != pwd2 or len(pwd) == 0:
|
||||||
print("Password mismatch.")
|
print "Password mismatch."
|
||||||
pwd = None
|
pwd = None
|
||||||
else:
|
else:
|
||||||
password = pwd
|
password = pwd
|
||||||
|
|
||||||
body = {"type": "m.login.password"}
|
body = {
|
||||||
|
"type": "m.login.password"
|
||||||
|
}
|
||||||
if "userid" in args:
|
if "userid" in args:
|
||||||
body["user"] = args["userid"]
|
body["user"] = args["userid"]
|
||||||
if password:
|
if password:
|
||||||
body["password"] = password
|
body["password"] = password
|
||||||
|
|
||||||
reactor.callFromThread(self._do_register, body, "noupdate" not in args)
|
reactor.callFromThread(self._do_register, body,
|
||||||
|
"noupdate" not in args)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_register(self, data, update_config):
|
def _do_register(self, data, update_config):
|
||||||
# check the registration flows
|
# check the registration flows
|
||||||
url = self._url() + "/register"
|
url = self._url() + "/register"
|
||||||
json_res = yield self.http_client.do_request("GET", url)
|
json_res = yield self.http_client.do_request("GET", url)
|
||||||
print(json.dumps(json_res, indent=4))
|
print json.dumps(json_res, indent=4)
|
||||||
|
|
||||||
passwordFlow = None
|
passwordFlow = None
|
||||||
for flow in json_res["flows"]:
|
for flow in json_res["flows"]:
|
||||||
if flow["type"] == "m.login.recaptcha" or (
|
if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
|
||||||
"stages" in flow and "m.login.recaptcha" in flow["stages"]
|
print "Unable to register: Home server requires captcha."
|
||||||
):
|
|
||||||
print("Unable to register: Home server requires captcha.")
|
|
||||||
return
|
return
|
||||||
if flow["type"] == "m.login.password" and "stages" not in flow:
|
if flow["type"] == "m.login.password" and "stages" not in flow:
|
||||||
passwordFlow = flow
|
passwordFlow = flow
|
||||||
@@ -186,7 +189,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
return
|
return
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||||
print(json.dumps(json_res, indent=4))
|
print json.dumps(json_res, indent=4)
|
||||||
if update_config and "user_id" in json_res:
|
if update_config and "user_id" in json_res:
|
||||||
self.config["user"] = json_res["user_id"]
|
self.config["user"] = json_res["user_id"]
|
||||||
self.config["token"] = json_res["access_token"]
|
self.config["token"] = json_res["access_token"]
|
||||||
@@ -198,7 +201,9 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
args = self._parse(line, ["user_id"], force_keys=True)
|
args = self._parse(line, ["user_id"], force_keys=True)
|
||||||
can_login = threads.blockingCallFromThread(reactor, self._check_can_login)
|
can_login = threads.blockingCallFromThread(
|
||||||
|
reactor,
|
||||||
|
self._check_can_login)
|
||||||
if can_login:
|
if can_login:
|
||||||
p = getpass.getpass("Enter your password: ")
|
p = getpass.getpass("Enter your password: ")
|
||||||
user = args["user_id"]
|
user = args["user_id"]
|
||||||
@@ -206,25 +211,29 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
domain = self._domain()
|
domain = self._domain()
|
||||||
if domain:
|
if domain:
|
||||||
user = "@" + user + ":" + domain
|
user = "@" + user + ":" + domain
|
||||||
|
|
||||||
reactor.callFromThread(self._do_login, user, p)
|
reactor.callFromThread(self._do_login, user, p)
|
||||||
# print " got %s " % p
|
#print " got %s " % p
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_login(self, user, password):
|
def _do_login(self, user, password):
|
||||||
path = "/login"
|
path = "/login"
|
||||||
data = {"user": user, "password": password, "type": "m.login.password"}
|
data = {
|
||||||
|
"user": user,
|
||||||
|
"password": password,
|
||||||
|
"type": "m.login.password"
|
||||||
|
}
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||||
print(json_res)
|
print json_res
|
||||||
|
|
||||||
if "access_token" in json_res:
|
if "access_token" in json_res:
|
||||||
self.config["user"] = user
|
self.config["user"] = user
|
||||||
self.config["token"] = json_res["access_token"]
|
self.config["token"] = json_res["access_token"]
|
||||||
save_config(self.config)
|
save_config(self.config)
|
||||||
print("Login successful.")
|
print "Login successful."
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _check_can_login(self):
|
def _check_can_login(self):
|
||||||
@@ -233,19 +242,18 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
# submitting!
|
# submitting!
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
json_res = yield self.http_client.do_request("GET", url)
|
json_res = yield self.http_client.do_request("GET", url)
|
||||||
print(json_res)
|
print json_res
|
||||||
|
|
||||||
if "flows" not in json_res:
|
if "flows" not in json_res:
|
||||||
print("Failed to find any login flows.")
|
print "Failed to find any login flows."
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
||||||
flow = json_res["flows"][0] # assume first is the one we want.
|
flow = json_res["flows"][0] # assume first is the one we want.
|
||||||
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
if ("type" not in flow or "m.login.password" != flow["type"] or
|
||||||
|
"stages" in flow):
|
||||||
fallback_url = self._url() + "/login/fallback"
|
fallback_url = self._url() + "/login/fallback"
|
||||||
print(
|
print ("Unable to login via the command line client. Please visit "
|
||||||
"Unable to login via the command line client. Please visit "
|
"%s to login." % fallback_url)
|
||||||
"%s to login." % fallback_url
|
|
||||||
)
|
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
defer.returnValue(True)
|
defer.returnValue(True)
|
||||||
|
|
||||||
@@ -255,34 +263,21 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
<clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
|
<clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
|
||||||
<sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
|
<sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["address", "clientSecret", "sendAttempt"])
|
args = self._parse(line, ['address', 'clientSecret', 'sendAttempt'])
|
||||||
|
|
||||||
postArgs = {
|
postArgs = {'email': args['address'], 'clientSecret': args['clientSecret'], 'sendAttempt': args['sendAttempt']}
|
||||||
"email": args["address"],
|
|
||||||
"clientSecret": args["clientSecret"],
|
|
||||||
"sendAttempt": args["sendAttempt"],
|
|
||||||
}
|
|
||||||
|
|
||||||
reactor.callFromThread(self._do_emailrequest, postArgs)
|
reactor.callFromThread(self._do_emailrequest, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailrequest(self, args):
|
def _do_emailrequest(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/requestToken"
|
||||||
url = (
|
|
||||||
self._identityServerUrl()
|
|
||||||
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
|
||||||
)
|
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||||
"POST",
|
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||||
url,
|
print json_res
|
||||||
data=urllib.urlencode(args),
|
if 'sid' in json_res:
|
||||||
jsonreq=False,
|
print "Token sent. Your session ID is %s" % (json_res['sid'])
|
||||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
|
||||||
)
|
|
||||||
print(json_res)
|
|
||||||
if "sid" in json_res:
|
|
||||||
print("Token sent. Your session ID is %s" % (json_res["sid"]))
|
|
||||||
|
|
||||||
def do_emailvalidate(self, line):
|
def do_emailvalidate(self, line):
|
||||||
"""Validate and associate a third party ID
|
"""Validate and associate a third party ID
|
||||||
@@ -290,58 +285,39 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
<token> The token sent to your third party identifier address
|
<token> The token sent to your third party identifier address
|
||||||
<clientSecret> The same clientSecret you supplied in requestToken
|
<clientSecret> The same clientSecret you supplied in requestToken
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["sid", "token", "clientSecret"])
|
args = self._parse(line, ['sid', 'token', 'clientSecret'])
|
||||||
|
|
||||||
postArgs = {
|
postArgs = { 'sid' : args['sid'], 'token' : args['token'], 'clientSecret': args['clientSecret'] }
|
||||||
"sid": args["sid"],
|
|
||||||
"token": args["token"],
|
|
||||||
"clientSecret": args["clientSecret"],
|
|
||||||
}
|
|
||||||
|
|
||||||
reactor.callFromThread(self._do_emailvalidate, postArgs)
|
reactor.callFromThread(self._do_emailvalidate, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailvalidate(self, args):
|
def _do_emailvalidate(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/submitToken"
|
||||||
url = (
|
|
||||||
self._identityServerUrl()
|
|
||||||
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
|
||||||
)
|
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||||
"POST",
|
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||||
url,
|
print json_res
|
||||||
data=urllib.urlencode(args),
|
|
||||||
jsonreq=False,
|
|
||||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
|
||||||
)
|
|
||||||
print(json_res)
|
|
||||||
|
|
||||||
def do_3pidbind(self, line):
|
def do_3pidbind(self, line):
|
||||||
"""Validate and associate a third party ID
|
"""Validate and associate a third party ID
|
||||||
<sid> The session ID (sid) given to you in the response to requestToken
|
<sid> The session ID (sid) given to you in the response to requestToken
|
||||||
<clientSecret> The same clientSecret you supplied in requestToken
|
<clientSecret> The same clientSecret you supplied in requestToken
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["sid", "clientSecret"])
|
args = self._parse(line, ['sid', 'clientSecret'])
|
||||||
|
|
||||||
postArgs = {"sid": args["sid"], "clientSecret": args["clientSecret"]}
|
postArgs = { 'sid' : args['sid'], 'clientSecret': args['clientSecret'] }
|
||||||
postArgs["mxid"] = self.config["user"]
|
postArgs['mxid'] = self.config["user"]
|
||||||
|
|
||||||
reactor.callFromThread(self._do_3pidbind, postArgs)
|
reactor.callFromThread(self._do_3pidbind, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_3pidbind(self, args):
|
def _do_3pidbind(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
url = self._identityServerUrl()+"/_matrix/identity/api/v1/3pid/bind"
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||||
"POST",
|
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||||
url,
|
print json_res
|
||||||
data=urllib.urlencode(args),
|
|
||||||
jsonreq=False,
|
|
||||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
|
||||||
)
|
|
||||||
print(json_res)
|
|
||||||
|
|
||||||
def do_join(self, line):
|
def do_join(self, line):
|
||||||
"""Joins a room: "join <roomid>" """
|
"""Joins a room: "join <roomid>" """
|
||||||
@@ -349,7 +325,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["roomid"], force_keys=True)
|
args = self._parse(line, ["roomid"], force_keys=True)
|
||||||
self._do_membership_change(args["roomid"], "join", self._usr())
|
self._do_membership_change(args["roomid"], "join", self._usr())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
def do_joinalias(self, line):
|
def do_joinalias(self, line):
|
||||||
try:
|
try:
|
||||||
@@ -357,34 +333,36 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
path = "/join/%s" % urllib.quote(args["roomname"])
|
path = "/join/%s" % urllib.quote(args["roomname"])
|
||||||
reactor.callFromThread(self._run_and_pprint, "POST", path, {})
|
reactor.callFromThread(self._run_and_pprint, "POST", path, {})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
def do_topic(self, line):
|
def do_topic(self, line):
|
||||||
""" "topic [set|get] <roomid> [<newtopic>]"
|
""""topic [set|get] <roomid> [<newtopic>]"
|
||||||
Set the topic for a room: topic set <roomid> <newtopic>
|
Set the topic for a room: topic set <roomid> <newtopic>
|
||||||
Get the topic for a room: topic get <roomid>
|
Get the topic for a room: topic get <roomid>
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
args = self._parse(line, ["action", "roomid", "topic"])
|
args = self._parse(line, ["action", "roomid", "topic"])
|
||||||
if "action" not in args or "roomid" not in args:
|
if "action" not in args or "roomid" not in args:
|
||||||
print("Must specify set|get and a room ID.")
|
print "Must specify set|get and a room ID."
|
||||||
return
|
return
|
||||||
if args["action"].lower() not in ["set", "get"]:
|
if args["action"].lower() not in ["set", "get"]:
|
||||||
print("Must specify set|get, not %s" % args["action"])
|
print "Must specify set|get, not %s" % args["action"]
|
||||||
return
|
return
|
||||||
|
|
||||||
path = "/rooms/%s/topic" % urllib.quote(args["roomid"])
|
path = "/rooms/%s/topic" % urllib.quote(args["roomid"])
|
||||||
|
|
||||||
if args["action"].lower() == "set":
|
if args["action"].lower() == "set":
|
||||||
if "topic" not in args:
|
if "topic" not in args:
|
||||||
print("Must specify a new topic.")
|
print "Must specify a new topic."
|
||||||
return
|
return
|
||||||
body = {"topic": args["topic"]}
|
body = {
|
||||||
|
"topic": args["topic"]
|
||||||
|
}
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
|
||||||
elif args["action"].lower() == "get":
|
elif args["action"].lower() == "get":
|
||||||
reactor.callFromThread(self._run_and_pprint, "GET", path)
|
reactor.callFromThread(self._run_and_pprint, "GET", path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
def do_invite(self, line):
|
def do_invite(self, line):
|
||||||
"""Invite a user to a room: "invite <userid> <roomid>" """
|
"""Invite a user to a room: "invite <userid> <roomid>" """
|
||||||
@@ -395,66 +373,49 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
reactor.callFromThread(self._do_invite, args["roomid"], user_id)
|
reactor.callFromThread(self._do_invite, args["roomid"], user_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_invite(self, roomid, userstring):
|
def _do_invite(self, roomid, userstring):
|
||||||
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
if (not userstring.startswith('@') and
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
self._is_on("complete_usernames")):
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
url = self._identityServerUrl()+"/_matrix/identity/api/v1/lookup"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request("GET", url, qparams={'medium':'email','address':userstring})
|
||||||
"GET", url, qparams={"medium": "email", "address": userstring}
|
|
||||||
)
|
|
||||||
|
|
||||||
mxid = None
|
mxid = None
|
||||||
|
|
||||||
if "mxid" in json_res and "signatures" in json_res:
|
if 'mxid' in json_res and 'signatures' in json_res:
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
url = self._identityServerUrl()+"/_matrix/identity/api/v1/pubkey/ed25519"
|
||||||
url = (
|
|
||||||
self._identityServerUrl()
|
|
||||||
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
|
||||||
)
|
|
||||||
|
|
||||||
pubKey = None
|
pubKey = None
|
||||||
pubKeyObj = yield self.http_client.do_request("GET", url)
|
pubKeyObj = yield self.http_client.do_request("GET", url)
|
||||||
if "public_key" in pubKeyObj:
|
if 'public_key' in pubKeyObj:
|
||||||
pubKey = decode_verify_key_bytes(
|
pubKey = nacl.signing.VerifyKey(pubKeyObj['public_key'], encoder=nacl.encoding.HexEncoder)
|
||||||
NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
print("No public key found in pubkey response!")
|
print "No public key found in pubkey response!"
|
||||||
|
|
||||||
sigValid = False
|
sigValid = False
|
||||||
|
|
||||||
if pubKey:
|
if pubKey:
|
||||||
for signame in json_res["signatures"]:
|
for signame in json_res['signatures']:
|
||||||
if signame not in TRUSTED_ID_SERVERS:
|
if signame not in TRUSTED_ID_SERVERS:
|
||||||
print(
|
print "Ignoring signature from untrusted server %s" % (signame)
|
||||||
"Ignoring signature from untrusted server %s"
|
|
||||||
% (signame)
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
verify_signed_json(json_res, signame, pubKey)
|
verify_signed_json(json_res, signame, pubKey)
|
||||||
sigValid = True
|
sigValid = True
|
||||||
print(
|
print "Mapping %s -> %s correctly signed by %s" % (userstring, json_res['mxid'], signame)
|
||||||
"Mapping %s -> %s correctly signed by %s"
|
|
||||||
% (userstring, json_res["mxid"], signame)
|
|
||||||
)
|
|
||||||
break
|
break
|
||||||
except SignatureVerifyException as e:
|
except SignatureVerifyException as e:
|
||||||
print("Invalid signature from %s" % (signame))
|
print "Invalid signature from %s" % (signame)
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
if sigValid:
|
if sigValid:
|
||||||
print("Resolved 3pid %s to %s" % (userstring, json_res["mxid"]))
|
print "Resolved 3pid %s to %s" % (userstring, json_res['mxid'])
|
||||||
mxid = json_res["mxid"]
|
mxid = json_res['mxid']
|
||||||
else:
|
else:
|
||||||
print(
|
print "Got association for %s but couldn't verify signature" % (userstring)
|
||||||
"Got association for %s but couldn't verify signature"
|
|
||||||
% (userstring)
|
|
||||||
)
|
|
||||||
|
|
||||||
if not mxid:
|
if not mxid:
|
||||||
mxid = "@" + userstring + ":" + self._domain()
|
mxid = "@" + userstring + ":" + self._domain()
|
||||||
@@ -467,17 +428,18 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["roomid"], force_keys=True)
|
args = self._parse(line, ["roomid"], force_keys=True)
|
||||||
self._do_membership_change(args["roomid"], "leave", self._usr())
|
self._do_membership_change(args["roomid"], "leave", self._usr())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print e
|
||||||
|
|
||||||
def do_send(self, line):
|
def do_send(self, line):
|
||||||
"""Sends a message. "send <roomid> <body>" """
|
"""Sends a message. "send <roomid> <body>" """
|
||||||
args = self._parse(line, ["roomid", "body"])
|
args = self._parse(line, ["roomid", "body"])
|
||||||
txn_id = "txn%s" % int(time.time())
|
txn_id = "txn%s" % int(time.time())
|
||||||
path = "/rooms/%s/send/m.room.message/%s" % (
|
path = "/rooms/%s/send/m.room.message/%s" % (urllib.quote(args["roomid"]),
|
||||||
urllib.quote(args["roomid"]),
|
txn_id)
|
||||||
txn_id,
|
body_json = {
|
||||||
)
|
"msgtype": "m.text",
|
||||||
body_json = {"msgtype": "m.text", "body": args["body"]}
|
"body": args["body"]
|
||||||
|
}
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)
|
||||||
|
|
||||||
def do_list(self, line):
|
def do_list(self, line):
|
||||||
@@ -490,11 +452,11 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"list messages <roomid> from=END&to=START&limit=3"
|
"list messages <roomid> from=END&to=START&limit=3"
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["type", "roomid", "qp"])
|
args = self._parse(line, ["type", "roomid", "qp"])
|
||||||
if "type" not in args or "roomid" not in args:
|
if not "type" in args or not "roomid" in args:
|
||||||
print("Must specify type and room ID.")
|
print "Must specify type and room ID."
|
||||||
return
|
return
|
||||||
if args["type"] not in ["members", "messages"]:
|
if args["type"] not in ["members", "messages"]:
|
||||||
print("Unrecognised type: %s" % args["type"])
|
print "Unrecognised type: %s" % args["type"]
|
||||||
return
|
return
|
||||||
room_id = args["roomid"]
|
room_id = args["roomid"]
|
||||||
path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])
|
path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])
|
||||||
@@ -505,11 +467,12 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
key_value = key_value_str.split("=")
|
key_value = key_value_str.split("=")
|
||||||
qp[key_value[0]] = key_value[1]
|
qp[key_value[0]] = key_value[1]
|
||||||
except Exception:
|
except:
|
||||||
print("Bad query param: %s" % key_value)
|
print "Bad query param: %s" % key_value
|
||||||
return
|
return
|
||||||
|
|
||||||
reactor.callFromThread(self._run_and_pprint, "GET", path, query_params=qp)
|
reactor.callFromThread(self._run_and_pprint, "GET", path,
|
||||||
|
query_params=qp)
|
||||||
|
|
||||||
def do_create(self, line):
|
def do_create(self, line):
|
||||||
"""Creates a room.
|
"""Creates a room.
|
||||||
@@ -545,22 +508,14 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["method", "path", "data"])
|
args = self._parse(line, ["method", "path", "data"])
|
||||||
# sanity check
|
# sanity check
|
||||||
if "method" not in args or "path" not in args:
|
if "method" not in args or "path" not in args:
|
||||||
print("Must specify path and method.")
|
print "Must specify path and method."
|
||||||
return
|
return
|
||||||
|
|
||||||
args["method"] = args["method"].upper()
|
args["method"] = args["method"].upper()
|
||||||
valid_methods = [
|
valid_methods = ["PUT", "GET", "POST", "DELETE",
|
||||||
"PUT",
|
"XPUT", "XGET", "XPOST", "XDELETE"]
|
||||||
"GET",
|
|
||||||
"POST",
|
|
||||||
"DELETE",
|
|
||||||
"XPUT",
|
|
||||||
"XGET",
|
|
||||||
"XPOST",
|
|
||||||
"XDELETE",
|
|
||||||
]
|
|
||||||
if args["method"] not in valid_methods:
|
if args["method"] not in valid_methods:
|
||||||
print("Unsupported method: %s" % args["method"])
|
print "Unsupported method: %s" % args["method"]
|
||||||
return
|
return
|
||||||
|
|
||||||
if "data" not in args:
|
if "data" not in args:
|
||||||
@@ -569,7 +524,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
args["data"] = json.loads(args["data"])
|
args["data"] = json.loads(args["data"])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("Data is not valid JSON. %s" % e)
|
print "Data is not valid JSON. %s" % e
|
||||||
return
|
return
|
||||||
|
|
||||||
qp = {"access_token": self._tok()}
|
qp = {"access_token": self._tok()}
|
||||||
@@ -582,16 +537,13 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
parsed_url = urlparse.urlparse(args["path"])
|
parsed_url = urlparse.urlparse(args["path"])
|
||||||
qp.update(urlparse.parse_qs(parsed_url.query))
|
qp.update(urlparse.parse_qs(parsed_url.query))
|
||||||
args["path"] = parsed_url.path
|
args["path"] = parsed_url.path
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
reactor.callFromThread(
|
reactor.callFromThread(self._run_and_pprint, args["method"],
|
||||||
self._run_and_pprint,
|
args["path"],
|
||||||
args["method"],
|
args["data"],
|
||||||
args["path"],
|
query_params=qp)
|
||||||
args["data"],
|
|
||||||
query_params=qp,
|
|
||||||
)
|
|
||||||
|
|
||||||
def do_stream(self, line):
|
def do_stream(self, line):
|
||||||
"""Stream data from the server: "stream <longpoll timeout ms>" """
|
"""Stream data from the server: "stream <longpoll timeout ms>" """
|
||||||
@@ -601,31 +553,26 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
timeout = int(args["timeout"])
|
timeout = int(args["timeout"])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print("Timeout must be in milliseconds.")
|
print "Timeout must be in milliseconds."
|
||||||
return
|
return
|
||||||
reactor.callFromThread(self._do_event_stream, timeout)
|
reactor.callFromThread(self._do_event_stream, timeout)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_event_stream(self, timeout):
|
def _do_event_stream(self, timeout):
|
||||||
res = yield defer.ensureDeferred(
|
res = yield self.http_client.get_json(
|
||||||
self.http_client.get_json(
|
|
||||||
self._url() + "/events",
|
self._url() + "/events",
|
||||||
{
|
{
|
||||||
"access_token": self._tok(),
|
"access_token": self._tok(),
|
||||||
"timeout": str(timeout),
|
"timeout": str(timeout),
|
||||||
"from": self.event_stream_token,
|
"from": self.event_stream_token
|
||||||
},
|
})
|
||||||
)
|
print json.dumps(res, indent=4)
|
||||||
)
|
|
||||||
print(json.dumps(res, indent=4))
|
|
||||||
|
|
||||||
if "chunk" in res:
|
if "chunk" in res:
|
||||||
for event in res["chunk"]:
|
for event in res["chunk"]:
|
||||||
if (
|
if (event["type"] == "m.room.message" and
|
||||||
event["type"] == "m.room.message"
|
self._is_on("send_delivery_receipts") and
|
||||||
and self._is_on("send_delivery_receipts")
|
event["user_id"] != self._usr()): # not sent by us
|
||||||
and event["user_id"] != self._usr()
|
|
||||||
): # not sent by us
|
|
||||||
self._send_receipt(event, "d")
|
self._send_receipt(event, "d")
|
||||||
|
|
||||||
# update the position in the stram
|
# update the position in the stram
|
||||||
@@ -633,28 +580,18 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
self.event_stream_token = res["end"]
|
self.event_stream_token = res["end"]
|
||||||
|
|
||||||
def _send_receipt(self, event, feedback_type):
|
def _send_receipt(self, event, feedback_type):
|
||||||
path = "/rooms/%s/messages/%s/%s/feedback/%s/%s" % (
|
path = ("/rooms/%s/messages/%s/%s/feedback/%s/%s" %
|
||||||
urllib.quote(event["room_id"]),
|
(urllib.quote(event["room_id"]), event["user_id"], event["msg_id"],
|
||||||
event["user_id"],
|
self._usr(), feedback_type))
|
||||||
event["msg_id"],
|
|
||||||
self._usr(),
|
|
||||||
feedback_type,
|
|
||||||
)
|
|
||||||
data = {}
|
data = {}
|
||||||
reactor.callFromThread(
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data,
|
||||||
self._run_and_pprint,
|
alt_text="Sent receipt for %s" % event["msg_id"])
|
||||||
"PUT",
|
|
||||||
path,
|
|
||||||
data=data,
|
|
||||||
alt_text="Sent receipt for %s" % event["msg_id"],
|
|
||||||
)
|
|
||||||
|
|
||||||
def _do_membership_change(self, roomid, membership, userid):
|
def _do_membership_change(self, roomid, membership, userid):
|
||||||
path = "/rooms/%s/state/m.room.member/%s" % (
|
path = "/rooms/%s/state/m.room.member/%s" % (urllib.quote(roomid), urllib.quote(userid))
|
||||||
urllib.quote(roomid),
|
data = {
|
||||||
urllib.quote(userid),
|
"membership": membership
|
||||||
)
|
}
|
||||||
data = {"membership": membership}
|
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)
|
||||||
|
|
||||||
def do_displayname(self, line):
|
def do_displayname(self, line):
|
||||||
@@ -690,7 +627,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
self._do_presence_state(2, line)
|
self._do_presence_state(2, line)
|
||||||
|
|
||||||
def _parse(self, line, keys, force_keys=False):
|
def _parse(self, line, keys, force_keys=False):
|
||||||
"""Parses the given line.
|
""" Parses the given line.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
line : The line to parse
|
line : The line to parse
|
||||||
@@ -707,21 +644,16 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
for i, arg in enumerate(line_args):
|
for i, arg in enumerate(line_args):
|
||||||
for config_key in self.config:
|
for config_key in self.config:
|
||||||
if ("$" + config_key) in arg:
|
if ("$" + config_key) in arg:
|
||||||
arg = arg.replace("$" + config_key, self.config[config_key])
|
arg = arg.replace("$" + config_key,
|
||||||
|
self.config[config_key])
|
||||||
line_args[i] = arg
|
line_args[i] = arg
|
||||||
|
|
||||||
return dict(zip(keys, line_args))
|
return dict(zip(keys, line_args))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _run_and_pprint(
|
def _run_and_pprint(self, method, path, data=None,
|
||||||
self,
|
query_params={"access_token": None}, alt_text=None):
|
||||||
method,
|
""" Runs an HTTP request and pretty prints the output.
|
||||||
path,
|
|
||||||
data=None,
|
|
||||||
query_params: Optional[dict] = None,
|
|
||||||
alt_text=None,
|
|
||||||
):
|
|
||||||
"""Runs an HTTP request and pretty prints the output.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
method: HTTP method
|
method: HTTP method
|
||||||
@@ -729,37 +661,35 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
data: Raw JSON data if any
|
data: Raw JSON data if any
|
||||||
query_params: dict of query parameters to add to the url
|
query_params: dict of query parameters to add to the url
|
||||||
"""
|
"""
|
||||||
query_params = query_params or {"access_token": None}
|
|
||||||
|
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
if "access_token" in query_params:
|
if "access_token" in query_params:
|
||||||
query_params["access_token"] = self._tok()
|
query_params["access_token"] = self._tok()
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request(method, url,
|
||||||
method, url, data=data, qparams=query_params
|
data=data,
|
||||||
)
|
qparams=query_params)
|
||||||
if alt_text:
|
if alt_text:
|
||||||
print(alt_text)
|
print alt_text
|
||||||
else:
|
else:
|
||||||
print(json.dumps(json_res, indent=4))
|
print json.dumps(json_res, indent=4)
|
||||||
|
|
||||||
|
|
||||||
def save_config(config):
|
def save_config(config):
|
||||||
with open(CONFIG_JSON, "w") as out:
|
with open(CONFIG_JSON, 'w') as out:
|
||||||
json.dump(config, out)
|
json.dump(config, out)
|
||||||
|
|
||||||
|
|
||||||
def main(server_url, identity_server_url, username, token, config_path):
|
def main(server_url, identity_server_url, username, token, config_path):
|
||||||
print("Synapse command line client")
|
print "Synapse command line client"
|
||||||
print("===========================")
|
print "==========================="
|
||||||
print("Server: %s" % server_url)
|
print "Server: %s" % server_url
|
||||||
print("Type 'help' to get started.")
|
print "Type 'help' to get started."
|
||||||
print("Close this console with CTRL+C then CTRL+D.")
|
print "Close this console with CTRL+C then CTRL+D."
|
||||||
if not username or not token:
|
if not username or not token:
|
||||||
print("- 'register <username>' - Register an account")
|
print "- 'register <username>' - Register an account"
|
||||||
print("- 'stream' - Connect to the event stream")
|
print "- 'stream' - Connect to the event stream"
|
||||||
print("- 'create <roomid>' - Create a room")
|
print "- 'create <roomid>' - Create a room"
|
||||||
print("- 'send <roomid> <message>' - Send a message")
|
print "- 'send <roomid> <message>' - Send a message"
|
||||||
http_client = TwistedHttpClient()
|
http_client = TwistedHttpClient()
|
||||||
|
|
||||||
# the command line client
|
# the command line client
|
||||||
@@ -769,14 +699,14 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
global CONFIG_JSON
|
global CONFIG_JSON
|
||||||
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
||||||
try:
|
try:
|
||||||
with open(config_path, "r") as config:
|
with open(config_path, 'r') as config:
|
||||||
syn_cmd.config = json.load(config)
|
syn_cmd.config = json.load(config)
|
||||||
try:
|
try:
|
||||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
print("Loaded config from %s" % config_path)
|
print "Loaded config from %s" % config_path
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# Twisted-specific: Runs the command processor in Twisted's event loop
|
# Twisted-specific: Runs the command processor in Twisted's event loop
|
||||||
@@ -786,37 +716,27 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
reactor.run()
|
reactor.run()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == '__main__':
|
||||||
parser = argparse.ArgumentParser("Starts a synapse client.")
|
parser = argparse.ArgumentParser("Starts a synapse client.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-s",
|
"-s", "--server", dest="server", default="http://localhost:8008",
|
||||||
"--server",
|
help="The URL of the home server to talk to.")
|
||||||
dest="server",
|
|
||||||
default="http://localhost:8008",
|
|
||||||
help="The URL of the home server to talk to.",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-i",
|
"-i", "--identity-server", dest="identityserver", default="http://localhost:8090",
|
||||||
"--identity-server",
|
help="The URL of the identity server to talk to.")
|
||||||
dest="identityserver",
|
|
||||||
default="http://localhost:8090",
|
|
||||||
help="The URL of the identity server to talk to.",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-u", "--username", dest="username", help="Your username on the server."
|
"-u", "--username", dest="username",
|
||||||
)
|
help="Your username on the server.")
|
||||||
parser.add_argument("-t", "--token", dest="token", help="Your access token.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-c",
|
"-t", "--token", dest="token",
|
||||||
"--config",
|
help="Your access token.")
|
||||||
dest="config",
|
parser.add_argument(
|
||||||
default=CONFIG_JSON,
|
"-c", "--config", dest="config", default=CONFIG_JSON,
|
||||||
help="The location of the config.json file to read from.",
|
help="The location of the config.json file to read from.")
|
||||||
)
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
if not args.server:
|
if not args.server:
|
||||||
print("You must supply a server URL to communicate with.")
|
print "You must supply a server URL to communicate with."
|
||||||
parser.print_help()
|
parser.print_help()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,21 +13,22 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import json
|
|
||||||
import urllib
|
|
||||||
from pprint import pformat
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor
|
|
||||||
from twisted.web.client import Agent, readBody
|
from twisted.web.client import Agent, readBody
|
||||||
from twisted.web.http_headers import Headers
|
from twisted.web.http_headers import Headers
|
||||||
|
from twisted.internet import defer, reactor
|
||||||
|
|
||||||
|
from pprint import pformat
|
||||||
|
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
|
||||||
|
|
||||||
class HttpClient:
|
class HttpClient(object):
|
||||||
"""Interface for talking json over http"""
|
""" Interface for talking json over http
|
||||||
|
"""
|
||||||
|
|
||||||
def put_json(self, url, data):
|
def put_json(self, url, data):
|
||||||
"""Sends the specifed json data using PUT
|
""" Sends the specifed json data using PUT
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to PUT data to.
|
url (str): The URL to PUT data to.
|
||||||
@@ -34,13 +36,15 @@ class HttpClient:
|
|||||||
the request body. This will be encoded as JSON.
|
the request body. This will be encoded as JSON.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
Deferred: Succeeds when we get *any* HTTP response.
|
||||||
will be the decoded JSON body.
|
|
||||||
|
The result of the deferred is a tuple of `(code, response)`,
|
||||||
|
where `response` is a dict representing the decoded JSON body.
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
"""Gets some json from the given host homeserver and path
|
""" Get's some json from the given host homeserver and path
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to GET data from.
|
url (str): The URL to GET data from.
|
||||||
@@ -50,14 +54,16 @@ class HttpClient:
|
|||||||
and *not* a string.
|
and *not* a string.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
Deferred: Succeeds when we get *any* HTTP response.
|
||||||
will be the decoded JSON body.
|
|
||||||
|
The result of the deferred is a tuple of `(code, response)`,
|
||||||
|
where `response` is a dict representing the decoded JSON body.
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class TwistedHttpClient(HttpClient):
|
class TwistedHttpClient(HttpClient):
|
||||||
"""Wrapper around the twisted HTTP client api.
|
""" Wrapper around the twisted HTTP client api.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
||||||
@@ -70,7 +76,9 @@ class TwistedHttpClient(HttpClient):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def put_json(self, url, data):
|
def put_json(self, url, data):
|
||||||
response = yield self._create_put_request(
|
response = yield self._create_put_request(
|
||||||
url, data, headers_dict={"Content-Type": ["application/json"]}
|
url,
|
||||||
|
data,
|
||||||
|
headers_dict={"Content-Type": ["application/json"]}
|
||||||
)
|
)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
defer.returnValue((response.code, body))
|
defer.returnValue((response.code, body))
|
||||||
@@ -85,46 +93,45 @@ class TwistedHttpClient(HttpClient):
|
|||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
defer.returnValue(json.loads(body))
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
def _create_put_request(self, url, json_data, headers_dict={}):
|
||||||
"""Wrapper of _create_request to issue a PUT request"""
|
""" Wrapper of _create_request to issue a PUT request
|
||||||
headers_dict = headers_dict or {}
|
"""
|
||||||
|
|
||||||
if "Content-Type" not in headers_dict:
|
if "Content-Type" not in headers_dict:
|
||||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
raise defer.error(
|
||||||
|
RuntimeError("Must include Content-Type header for PUTs"))
|
||||||
|
|
||||||
return self._create_request(
|
return self._create_request(
|
||||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
"PUT",
|
||||||
|
url,
|
||||||
|
producer=_JsonProducer(json_data),
|
||||||
|
headers_dict=headers_dict
|
||||||
)
|
)
|
||||||
|
|
||||||
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
|
def _create_get_request(self, url, headers_dict={}):
|
||||||
"""Wrapper of _create_request to issue a GET request"""
|
""" Wrapper of _create_request to issue a GET request
|
||||||
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
"""
|
||||||
|
return self._create_request(
|
||||||
|
"GET",
|
||||||
|
url,
|
||||||
|
headers_dict=headers_dict
|
||||||
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def do_request(
|
def do_request(self, method, url, data=None, qparams=None, jsonreq=True, headers={}):
|
||||||
self,
|
|
||||||
method,
|
|
||||||
url,
|
|
||||||
data=None,
|
|
||||||
qparams=None,
|
|
||||||
jsonreq=True,
|
|
||||||
headers: Optional[dict] = None,
|
|
||||||
):
|
|
||||||
headers = headers or {}
|
|
||||||
|
|
||||||
if qparams:
|
if qparams:
|
||||||
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
||||||
|
|
||||||
if jsonreq:
|
if jsonreq:
|
||||||
prod = _JsonProducer(data)
|
prod = _JsonProducer(data)
|
||||||
headers["Content-Type"] = ["application/json"]
|
headers['Content-Type'] = ["application/json"];
|
||||||
else:
|
else:
|
||||||
prod = _RawProducer(data)
|
prod = _RawProducer(data)
|
||||||
|
|
||||||
if method in ["POST", "PUT"]:
|
if method in ["POST", "PUT"]:
|
||||||
response = yield self._create_request(
|
response = yield self._create_request(method, url,
|
||||||
method, url, producer=prod, headers_dict=headers
|
producer=prod,
|
||||||
)
|
headers_dict=headers)
|
||||||
else:
|
else:
|
||||||
response = yield self._create_request(method, url)
|
response = yield self._create_request(method, url)
|
||||||
|
|
||||||
@@ -132,33 +139,33 @@ class TwistedHttpClient(HttpClient):
|
|||||||
defer.returnValue(json.loads(body))
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_request(
|
def _create_request(self, method, url, producer=None, headers_dict={}):
|
||||||
self, method, url, producer=None, headers_dict: Optional[dict] = None
|
""" Creates and sends a request to the given url
|
||||||
):
|
"""
|
||||||
"""Creates and sends a request to the given url"""
|
|
||||||
headers_dict = headers_dict or {}
|
|
||||||
|
|
||||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||||
|
|
||||||
retries_left = 5
|
retries_left = 5
|
||||||
print("%s to %s with headers %s" % (method, url, headers_dict))
|
print "%s to %s with headers %s" % (method, url, headers_dict)
|
||||||
if self.verbose and producer:
|
if self.verbose and producer:
|
||||||
if "password" in producer.data:
|
if "password" in producer.data:
|
||||||
temp = producer.data["password"]
|
temp = producer.data["password"]
|
||||||
producer.data["password"] = "[REDACTED]"
|
producer.data["password"] = "[REDACTED]"
|
||||||
print(json.dumps(producer.data, indent=4))
|
print json.dumps(producer.data, indent=4)
|
||||||
producer.data["password"] = temp
|
producer.data["password"] = temp
|
||||||
else:
|
else:
|
||||||
print(json.dumps(producer.data, indent=4))
|
print json.dumps(producer.data, indent=4)
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
response = yield self.agent.request(
|
response = yield self.agent.request(
|
||||||
method, url.encode("UTF8"), Headers(headers_dict), producer
|
method,
|
||||||
|
url.encode("UTF8"),
|
||||||
|
Headers(headers_dict),
|
||||||
|
producer
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("uh oh: %s" % e)
|
print "uh oh: %s" % e
|
||||||
if retries_left:
|
if retries_left:
|
||||||
yield self.sleep(2 ** (5 - retries_left))
|
yield self.sleep(2 ** (5 - retries_left))
|
||||||
retries_left -= 1
|
retries_left -= 1
|
||||||
@@ -166,8 +173,8 @@ class TwistedHttpClient(HttpClient):
|
|||||||
raise e
|
raise e
|
||||||
|
|
||||||
if self.verbose:
|
if self.verbose:
|
||||||
print("Status %s %s" % (response.code, response.phrase))
|
print "Status %s %s" % (response.code, response.phrase)
|
||||||
print(pformat(list(response.headers.getAllRawHeaders())))
|
print pformat(list(response.headers.getAllRawHeaders()))
|
||||||
defer.returnValue(response)
|
defer.returnValue(response)
|
||||||
|
|
||||||
def sleep(self, seconds):
|
def sleep(self, seconds):
|
||||||
@@ -175,8 +182,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
reactor.callLater(seconds, d.callback, seconds)
|
reactor.callLater(seconds, d.callback, seconds)
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
class _RawProducer(object):
|
||||||
class _RawProducer:
|
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.data = data
|
self.data = data
|
||||||
self.body = data
|
self.body = data
|
||||||
@@ -192,10 +198,9 @@ class _RawProducer:
|
|||||||
def stopProducing(self):
|
def stopProducing(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
class _JsonProducer(object):
|
||||||
class _JsonProducer:
|
""" Used by the twisted http client to create the HTTP body from json
|
||||||
"""Used by the twisted http client to create the HTTP body from json"""
|
"""
|
||||||
|
|
||||||
def __init__(self, jsn):
|
def __init__(self, jsn):
|
||||||
self.data = jsn
|
self.data = jsn
|
||||||
self.body = json.dumps(jsn).encode("utf8")
|
self.body = json.dumps(jsn).encode("utf8")
|
||||||
@@ -209,4 +214,4 @@ class _JsonProducer:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def stopProducing(self):
|
def stopProducing(self):
|
||||||
pass
|
pass
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
|
|
||||||
# Synapse Docker
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
A sample ``docker-compose.yml`` is provided, including example labels for
|
|
||||||
reverse proxying and other artifacts. The docker-compose file is an example,
|
|
||||||
please comment/uncomment sections that are not suitable for your usecase.
|
|
||||||
|
|
||||||
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
|
||||||
to use manual configuration.
|
|
||||||
|
|
||||||
To generate a fresh `homeserver.yaml`, you can use the `generate` command.
|
|
||||||
(See the [documentation](../../docker/README.md#generating-a-configuration-file)
|
|
||||||
for more information.) You will need to specify appropriate values for at least the
|
|
||||||
`SYNAPSE_SERVER_NAME` and `SYNAPSE_REPORT_STATS` environment variables. For example:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host -e SYNAPSE_REPORT_STATS=yes synapse generate
|
|
||||||
```
|
|
||||||
|
|
||||||
(This will also generate necessary signing keys.)
|
|
||||||
|
|
||||||
Then, customize your configuration and run the server:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker-compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### More information
|
|
||||||
|
|
||||||
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
# This compose file is compatible with Compose itself, it might need some
|
|
||||||
# adjustments to run properly with stack.
|
|
||||||
|
|
||||||
version: '3'
|
|
||||||
|
|
||||||
services:
|
|
||||||
|
|
||||||
synapse:
|
|
||||||
build:
|
|
||||||
context: ../..
|
|
||||||
dockerfile: docker/Dockerfile
|
|
||||||
image: docker.io/matrixdotorg/synapse:latest
|
|
||||||
# Since synapse does not retry to connect to the database, restart upon
|
|
||||||
# failure
|
|
||||||
restart: unless-stopped
|
|
||||||
# See the readme for a full documentation of the environment settings
|
|
||||||
# NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
|
|
||||||
environment:
|
|
||||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
|
||||||
volumes:
|
|
||||||
# You may either store all the files in a local folder
|
|
||||||
- ./files:/data
|
|
||||||
# .. or you may split this between different storage points
|
|
||||||
# - ./files:/data
|
|
||||||
# - /path/to/ssd:/data/uploads
|
|
||||||
# - /path/to/large_hdd:/data/media
|
|
||||||
depends_on:
|
|
||||||
- db
|
|
||||||
# In order to expose Synapse, remove one of the following, you might for
|
|
||||||
# instance expose the TLS port directly:
|
|
||||||
ports:
|
|
||||||
- 8448:8448/tcp
|
|
||||||
# ... or use a reverse proxy, here is an example for traefik:
|
|
||||||
labels:
|
|
||||||
# The following lines are valid for Traefik version 1.x:
|
|
||||||
- traefik.enable=true
|
|
||||||
- traefik.frontend.rule=Host:my.matrix.Host
|
|
||||||
- traefik.port=8008
|
|
||||||
# Alternatively, for Traefik version 2.0:
|
|
||||||
- traefik.enable=true
|
|
||||||
- traefik.http.routers.http-synapse.entryPoints=http
|
|
||||||
- traefik.http.routers.http-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.scheme=https
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.permanent=true
|
|
||||||
- traefik.http.routers.http-synapse.middlewares=https_redirect
|
|
||||||
- traefik.http.routers.https-synapse.entryPoints=https
|
|
||||||
- traefik.http.routers.https-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.routers.https-synapse.service=synapse
|
|
||||||
- traefik.http.routers.https-synapse.tls=true
|
|
||||||
- traefik.http.services.synapse.loadbalancer.server.port=8008
|
|
||||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
|
||||||
|
|
||||||
db:
|
|
||||||
image: docker.io/postgres:12-alpine
|
|
||||||
# Change that password, of course!
|
|
||||||
environment:
|
|
||||||
- POSTGRES_USER=synapse
|
|
||||||
- POSTGRES_PASSWORD=changeme
|
|
||||||
# ensure the database gets created correctly
|
|
||||||
# https://matrix-org.github.io/synapse/latest/postgres.html#set-up-database
|
|
||||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
|
||||||
volumes:
|
|
||||||
# You may store the database tables in a local folder..
|
|
||||||
- ./schemas:/var/lib/postgresql/data
|
|
||||||
# .. or store them on some high performance storage for better results
|
|
||||||
# - /path/to/ssd/storage:/var/lib/postgresql/data
|
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
# Setting up Synapse with Workers using Docker Compose
|
|
||||||
|
|
||||||
This directory describes how deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).
|
|
||||||
|
|
||||||
Example worker configuration files can be found [here](workers).
|
|
||||||
|
|
||||||
All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.
|
|
||||||
|
|
||||||
An example Docker Compose file can be found [here](docker-compose.yaml).
|
|
||||||
|
|
||||||
## Worker Service Examples in Docker Compose
|
|
||||||
|
|
||||||
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
|
|
||||||
|
|
||||||
### Generic Worker Example
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
synapse-generic-worker-1:
|
|
||||||
image: matrixdotorg/synapse:latest
|
|
||||||
container_name: synapse-generic-worker-1
|
|
||||||
restart: unless-stopped
|
|
||||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
|
|
||||||
start_period: "5s"
|
|
||||||
interval: "15s"
|
|
||||||
timeout: "5s"
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
|
||||||
environment:
|
|
||||||
SYNAPSE_WORKER: synapse.app.generic_worker
|
|
||||||
# Expose port if required so your reverse proxy can send requests to this worker
|
|
||||||
# Port configuration will depend on how the http listener is defined in the worker configuration file
|
|
||||||
ports:
|
|
||||||
- 8081:8081
|
|
||||||
depends_on:
|
|
||||||
- synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
### Federation Sender Example
|
|
||||||
|
|
||||||
Please note: The federation sender does not receive REST API calls so no exposed ports are required.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
synapse-federation-sender-1:
|
|
||||||
image: matrixdotorg/synapse:latest
|
|
||||||
container_name: synapse-federation-sender-1
|
|
||||||
restart: unless-stopped
|
|
||||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
|
|
||||||
healthcheck:
|
|
||||||
disable: true
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
|
||||||
environment:
|
|
||||||
SYNAPSE_WORKER: synapse.app.federation_sender
|
|
||||||
depends_on:
|
|
||||||
- synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
## `homeserver.yaml` Configuration
|
|
||||||
|
|
||||||
### Enable Redis
|
|
||||||
|
|
||||||
Locate the `redis` section of your `homeserver.yaml` and enable and configure it:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
redis:
|
|
||||||
enabled: true
|
|
||||||
host: redis
|
|
||||||
port: 6379
|
|
||||||
# password: <secret_password>
|
|
||||||
```
|
|
||||||
|
|
||||||
This assumes that your Redis service is called `redis` in your Docker Compose file.
|
|
||||||
|
|
||||||
### Add a replication Listener
|
|
||||||
|
|
||||||
Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
listeners:
|
|
||||||
# Other listeners
|
|
||||||
|
|
||||||
- port: 9093
|
|
||||||
type: http
|
|
||||||
resources:
|
|
||||||
- names: [replication]
|
|
||||||
```
|
|
||||||
|
|
||||||
This listener is used by the workers for replication and is referred to in worker config files using the following settings:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
worker_replication_host: synapse
|
|
||||||
worker_replication_http_port: 9093
|
|
||||||
```
|
|
||||||
|
|
||||||
### Configure Federation Senders
|
|
||||||
|
|
||||||
This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# This will disable federation sending on the main Synapse instance
|
|
||||||
send_federation: false
|
|
||||||
|
|
||||||
federation_sender_instances:
|
|
||||||
- synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
|
|
||||||
```
|
|
||||||
|
|
||||||
## Other Worker types
|
|
||||||
|
|
||||||
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
networks:
|
|
||||||
backend:
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:latest
|
|
||||||
restart: unless-stopped
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
|
|
||||||
networks:
|
|
||||||
- backend
|
|
||||||
environment:
|
|
||||||
POSTGRES_DB: synapse
|
|
||||||
POSTGRES_USER: synapse_user
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C
|
|
||||||
|
|
||||||
redis:
|
|
||||||
image: redis:latest
|
|
||||||
restart: unless-stopped
|
|
||||||
networks:
|
|
||||||
- backend
|
|
||||||
|
|
||||||
synapse:
|
|
||||||
image: matrixdotorg/synapse:latest
|
|
||||||
container_name: synapse
|
|
||||||
restart: unless-stopped
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/data:/data:rw
|
|
||||||
ports:
|
|
||||||
- 8008:8008
|
|
||||||
networks:
|
|
||||||
- backend
|
|
||||||
environment:
|
|
||||||
SYNAPSE_CONFIG_DIR: /data
|
|
||||||
SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
|
|
||||||
depends_on:
|
|
||||||
- postgres
|
|
||||||
|
|
||||||
synapse-generic-worker-1:
|
|
||||||
image: matrixdotorg/synapse:latest
|
|
||||||
container_name: synapse-generic-worker-1
|
|
||||||
restart: unless-stopped
|
|
||||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
|
|
||||||
start_period: "5s"
|
|
||||||
interval: "15s"
|
|
||||||
timeout: "5s"
|
|
||||||
networks:
|
|
||||||
- backend
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
|
||||||
environment:
|
|
||||||
SYNAPSE_WORKER: synapse.app.generic_worker
|
|
||||||
# Expose port if required so your reverse proxy can send requests to this worker
|
|
||||||
# Port configuration will depend on how the http listener is defined in the worker configuration file
|
|
||||||
ports:
|
|
||||||
- 8081:8081
|
|
||||||
depends_on:
|
|
||||||
- synapse
|
|
||||||
|
|
||||||
synapse-federation-sender-1:
|
|
||||||
image: matrixdotorg/synapse:latest
|
|
||||||
container_name: synapse-federation-sender-1
|
|
||||||
restart: unless-stopped
|
|
||||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
|
|
||||||
healthcheck:
|
|
||||||
disable: true
|
|
||||||
networks:
|
|
||||||
- backend
|
|
||||||
volumes:
|
|
||||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
|
||||||
environment:
|
|
||||||
SYNAPSE_WORKER: synapse.app.federation_sender
|
|
||||||
depends_on:
|
|
||||||
- synapse
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
worker_app: synapse.app.federation_sender
|
|
||||||
worker_name: synapse-federation-sender-1
|
|
||||||
|
|
||||||
# The replication listener on the main synapse process.
|
|
||||||
worker_replication_host: synapse
|
|
||||||
worker_replication_http_port: 9093
|
|
||||||
|
|
||||||
worker_log_config: /data/federation_sender.log.config
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
worker_app: synapse.app.generic_worker
|
|
||||||
worker_name: synapse-generic-worker-1
|
|
||||||
|
|
||||||
# The replication listener on the main synapse process.
|
|
||||||
worker_replication_host: synapse
|
|
||||||
worker_replication_http_port: 9093
|
|
||||||
|
|
||||||
worker_listeners:
|
|
||||||
- type: http
|
|
||||||
port: 8081
|
|
||||||
x_forwarded: true
|
|
||||||
resources:
|
|
||||||
- names: [client, federation]
|
|
||||||
|
|
||||||
worker_log_config: /data/worker.log.config
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
# Example log_config file for synapse. To enable, point `log_config` to it in
|
|
||||||
# `homeserver.yaml`, and restart synapse.
|
|
||||||
#
|
|
||||||
# This configuration will produce similar results to the defaults within
|
|
||||||
# synapse, but can be edited to give more flexibility.
|
|
||||||
|
|
||||||
version: 1
|
|
||||||
|
|
||||||
formatters:
|
|
||||||
fmt:
|
|
||||||
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
|
|
||||||
|
|
||||||
filters:
|
|
||||||
context:
|
|
||||||
(): synapse.logging.context.LoggingContextFilter
|
|
||||||
request: ""
|
|
||||||
|
|
||||||
handlers:
|
|
||||||
# example output to console
|
|
||||||
console:
|
|
||||||
class: logging.StreamHandler
|
|
||||||
formatter: fmt
|
|
||||||
filters: [context]
|
|
||||||
|
|
||||||
# example output to file - to enable, edit 'root' config below.
|
|
||||||
file:
|
|
||||||
class: logging.handlers.RotatingFileHandler
|
|
||||||
formatter: fmt
|
|
||||||
filename: /var/log/synapse/homeserver.log
|
|
||||||
maxBytes: 100000000
|
|
||||||
backupCount: 3
|
|
||||||
filters: [context]
|
|
||||||
encoding: utf8
|
|
||||||
|
|
||||||
root:
|
|
||||||
level: INFO
|
|
||||||
handlers: [console] # to use file handler instead, switch to [file]
|
|
||||||
|
|
||||||
loggers:
|
|
||||||
synapse:
|
|
||||||
level: INFO
|
|
||||||
|
|
||||||
synapse.storage.SQL:
|
|
||||||
# beware: increasing this to DEBUG will make synapse log sensitive
|
|
||||||
# information such as access tokens.
|
|
||||||
level: INFO
|
|
||||||
|
|
||||||
# example of enabling debugging for a component:
|
|
||||||
#
|
|
||||||
# synapse.federation.transport.server:
|
|
||||||
# level: DEBUG
|
|
||||||
168
contrib/experiments/cursesio.py
Normal file
168
contrib/experiments/cursesio.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
# Copyright 2014 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import curses
|
||||||
|
import curses.wrapper
|
||||||
|
from curses.ascii import isprint
|
||||||
|
|
||||||
|
from twisted.internet import reactor
|
||||||
|
|
||||||
|
|
||||||
|
class CursesStdIO():
|
||||||
|
def __init__(self, stdscr, callback=None):
|
||||||
|
self.statusText = "Synapse test app -"
|
||||||
|
self.searchText = ''
|
||||||
|
self.stdscr = stdscr
|
||||||
|
|
||||||
|
self.logLine = ''
|
||||||
|
|
||||||
|
self.callback = callback
|
||||||
|
|
||||||
|
self._setup()
|
||||||
|
|
||||||
|
def _setup(self):
|
||||||
|
self.stdscr.nodelay(1) # Make non blocking
|
||||||
|
|
||||||
|
self.rows, self.cols = self.stdscr.getmaxyx()
|
||||||
|
self.lines = []
|
||||||
|
|
||||||
|
curses.use_default_colors()
|
||||||
|
|
||||||
|
self.paintStatus(self.statusText)
|
||||||
|
self.stdscr.refresh()
|
||||||
|
|
||||||
|
def set_callback(self, callback):
|
||||||
|
self.callback = callback
|
||||||
|
|
||||||
|
def fileno(self):
|
||||||
|
""" We want to select on FD 0 """
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def connectionLost(self, reason):
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def print_line(self, text):
|
||||||
|
""" add a line to the internal list of lines"""
|
||||||
|
|
||||||
|
self.lines.append(text)
|
||||||
|
self.redraw()
|
||||||
|
|
||||||
|
def print_log(self, text):
|
||||||
|
self.logLine = text
|
||||||
|
self.redraw()
|
||||||
|
|
||||||
|
def redraw(self):
|
||||||
|
""" method for redisplaying lines
|
||||||
|
based on internal list of lines """
|
||||||
|
|
||||||
|
self.stdscr.clear()
|
||||||
|
self.paintStatus(self.statusText)
|
||||||
|
i = 0
|
||||||
|
index = len(self.lines) - 1
|
||||||
|
while i < (self.rows - 3) and index >= 0:
|
||||||
|
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index],
|
||||||
|
curses.A_NORMAL)
|
||||||
|
i = i + 1
|
||||||
|
index = index - 1
|
||||||
|
|
||||||
|
self.printLogLine(self.logLine)
|
||||||
|
|
||||||
|
self.stdscr.refresh()
|
||||||
|
|
||||||
|
def paintStatus(self, text):
|
||||||
|
if len(text) > self.cols:
|
||||||
|
raise RuntimeError("TextTooLongError")
|
||||||
|
|
||||||
|
self.stdscr.addstr(
|
||||||
|
self.rows - 2, 0,
|
||||||
|
text + ' ' * (self.cols - len(text)),
|
||||||
|
curses.A_STANDOUT)
|
||||||
|
|
||||||
|
def printLogLine(self, text):
|
||||||
|
self.stdscr.addstr(
|
||||||
|
0, 0,
|
||||||
|
text + ' ' * (self.cols - len(text)),
|
||||||
|
curses.A_STANDOUT)
|
||||||
|
|
||||||
|
def doRead(self):
|
||||||
|
""" Input is ready! """
|
||||||
|
curses.noecho()
|
||||||
|
c = self.stdscr.getch() # read a character
|
||||||
|
|
||||||
|
if c == curses.KEY_BACKSPACE:
|
||||||
|
self.searchText = self.searchText[:-1]
|
||||||
|
|
||||||
|
elif c == curses.KEY_ENTER or c == 10:
|
||||||
|
text = self.searchText
|
||||||
|
self.searchText = ''
|
||||||
|
|
||||||
|
self.print_line(">> %s" % text)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self.callback:
|
||||||
|
self.callback.on_line(text)
|
||||||
|
except Exception as e:
|
||||||
|
self.print_line(str(e))
|
||||||
|
|
||||||
|
self.stdscr.refresh()
|
||||||
|
|
||||||
|
elif isprint(c):
|
||||||
|
if len(self.searchText) == self.cols - 2:
|
||||||
|
return
|
||||||
|
self.searchText = self.searchText + chr(c)
|
||||||
|
|
||||||
|
self.stdscr.addstr(self.rows - 1, 0,
|
||||||
|
self.searchText + (' ' * (
|
||||||
|
self.cols - len(self.searchText) - 2)))
|
||||||
|
|
||||||
|
self.paintStatus(self.statusText + ' %d' % len(self.searchText))
|
||||||
|
self.stdscr.move(self.rows - 1, len(self.searchText))
|
||||||
|
self.stdscr.refresh()
|
||||||
|
|
||||||
|
def logPrefix(self):
|
||||||
|
return "CursesStdIO"
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
""" clean up """
|
||||||
|
|
||||||
|
curses.nocbreak()
|
||||||
|
self.stdscr.keypad(0)
|
||||||
|
curses.echo()
|
||||||
|
curses.endwin()
|
||||||
|
|
||||||
|
|
||||||
|
class Callback(object):
|
||||||
|
|
||||||
|
def __init__(self, stdio):
|
||||||
|
self.stdio = stdio
|
||||||
|
|
||||||
|
def on_line(self, text):
|
||||||
|
self.stdio.print_line(text)
|
||||||
|
|
||||||
|
|
||||||
|
def main(stdscr):
|
||||||
|
screen = CursesStdIO(stdscr) # create Screen object
|
||||||
|
|
||||||
|
callback = Callback(screen)
|
||||||
|
|
||||||
|
screen.set_callback(callback)
|
||||||
|
|
||||||
|
stdscr.refresh()
|
||||||
|
reactor.addReader(screen)
|
||||||
|
reactor.run()
|
||||||
|
screen.close()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
curses.wrapper(main)
|
||||||
394
contrib/experiments/test_messaging.py
Normal file
394
contrib/experiments/test_messaging.py
Normal file
@@ -0,0 +1,394 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2014 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
""" This is an example of using the server to server implementation to do a
|
||||||
|
basic chat style thing. It accepts commands from stdin and outputs to stdout.
|
||||||
|
|
||||||
|
It assumes that ucids are of the form <user>@<domain>, and uses <domain> as
|
||||||
|
the address of the remote home server to hit.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python test_messaging.py <port>
|
||||||
|
|
||||||
|
Currently assumes the local address is localhost:<port>
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from synapse.federation import (
|
||||||
|
ReplicationHandler
|
||||||
|
)
|
||||||
|
|
||||||
|
from synapse.federation.units import Pdu
|
||||||
|
|
||||||
|
from synapse.util import origin_from_ucid
|
||||||
|
|
||||||
|
from synapse.app.homeserver import SynapseHomeServer
|
||||||
|
|
||||||
|
#from synapse.util.logutils import log_function
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer
|
||||||
|
from twisted.python import log
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import cursesio
|
||||||
|
import curses.wrapper
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("example")
|
||||||
|
|
||||||
|
|
||||||
|
def excpetion_errback(failure):
|
||||||
|
logging.exception(failure)
|
||||||
|
|
||||||
|
|
||||||
|
class InputOutput(object):
|
||||||
|
""" This is responsible for basic I/O so that a user can interact with
|
||||||
|
the example app.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, screen, user):
|
||||||
|
self.screen = screen
|
||||||
|
self.user = user
|
||||||
|
|
||||||
|
def set_home_server(self, server):
|
||||||
|
self.server = server
|
||||||
|
|
||||||
|
def on_line(self, line):
|
||||||
|
""" This is where we process commands.
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
m = re.match("^join (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# The `sender` wants to join a room.
|
||||||
|
room_name, = m.groups()
|
||||||
|
self.print_line("%s joining %s" % (self.user, room_name))
|
||||||
|
self.server.join_room(room_name, self.user, self.user)
|
||||||
|
#self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match("^invite (\S+) (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# `sender` wants to invite someone to a room
|
||||||
|
room_name, invitee = m.groups()
|
||||||
|
self.print_line("%s invited to %s" % (invitee, room_name))
|
||||||
|
self.server.invite_to_room(room_name, self.user, invitee)
|
||||||
|
#self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match("^send (\S+) (.*)$", line)
|
||||||
|
if m:
|
||||||
|
# `sender` wants to message a room
|
||||||
|
room_name, body = m.groups()
|
||||||
|
self.print_line("%s send to %s" % (self.user, room_name))
|
||||||
|
self.server.send_message(room_name, self.user, body)
|
||||||
|
#self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match("^backfill (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# we want to backfill a room
|
||||||
|
room_name, = m.groups()
|
||||||
|
self.print_line("backfill %s" % room_name)
|
||||||
|
self.server.backfill(room_name)
|
||||||
|
return
|
||||||
|
|
||||||
|
self.print_line("Unrecognized command")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
def print_line(self, text):
|
||||||
|
self.screen.print_line(text)
|
||||||
|
|
||||||
|
def print_log(self, text):
|
||||||
|
self.screen.print_log(text)
|
||||||
|
|
||||||
|
|
||||||
|
class IOLoggerHandler(logging.Handler):
|
||||||
|
|
||||||
|
def __init__(self, io):
|
||||||
|
logging.Handler.__init__(self)
|
||||||
|
self.io = io
|
||||||
|
|
||||||
|
def emit(self, record):
|
||||||
|
if record.levelno < logging.WARN:
|
||||||
|
return
|
||||||
|
|
||||||
|
msg = self.format(record)
|
||||||
|
self.io.print_log(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class Room(object):
|
||||||
|
""" Used to store (in memory) the current membership state of a room, and
|
||||||
|
which home servers we should send PDUs associated with the room to.
|
||||||
|
"""
|
||||||
|
def __init__(self, room_name):
|
||||||
|
self.room_name = room_name
|
||||||
|
self.invited = set()
|
||||||
|
self.participants = set()
|
||||||
|
self.servers = set()
|
||||||
|
|
||||||
|
self.oldest_server = None
|
||||||
|
|
||||||
|
self.have_got_metadata = False
|
||||||
|
|
||||||
|
def add_participant(self, participant):
|
||||||
|
""" Someone has joined the room
|
||||||
|
"""
|
||||||
|
self.participants.add(participant)
|
||||||
|
self.invited.discard(participant)
|
||||||
|
|
||||||
|
server = origin_from_ucid(participant)
|
||||||
|
self.servers.add(server)
|
||||||
|
|
||||||
|
if not self.oldest_server:
|
||||||
|
self.oldest_server = server
|
||||||
|
|
||||||
|
def add_invited(self, invitee):
|
||||||
|
""" Someone has been invited to the room
|
||||||
|
"""
|
||||||
|
self.invited.add(invitee)
|
||||||
|
self.servers.add(origin_from_ucid(invitee))
|
||||||
|
|
||||||
|
|
||||||
|
class HomeServer(ReplicationHandler):
|
||||||
|
""" A very basic home server implentation that allows people to join a
|
||||||
|
room and then invite other people.
|
||||||
|
"""
|
||||||
|
def __init__(self, server_name, replication_layer, output):
|
||||||
|
self.server_name = server_name
|
||||||
|
self.replication_layer = replication_layer
|
||||||
|
self.replication_layer.set_handler(self)
|
||||||
|
|
||||||
|
self.joined_rooms = {}
|
||||||
|
|
||||||
|
self.output = output
|
||||||
|
|
||||||
|
def on_receive_pdu(self, pdu):
|
||||||
|
""" We just received a PDU
|
||||||
|
"""
|
||||||
|
pdu_type = pdu.pdu_type
|
||||||
|
|
||||||
|
if pdu_type == "sy.room.message":
|
||||||
|
self._on_message(pdu)
|
||||||
|
elif pdu_type == "sy.room.member" and "membership" in pdu.content:
|
||||||
|
if pdu.content["membership"] == "join":
|
||||||
|
self._on_join(pdu.context, pdu.state_key)
|
||||||
|
elif pdu.content["membership"] == "invite":
|
||||||
|
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
||||||
|
else:
|
||||||
|
self.output.print_line("#%s (unrec) %s = %s" %
|
||||||
|
(pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||||
|
)
|
||||||
|
|
||||||
|
#def on_state_change(self, pdu):
|
||||||
|
##self.output.print_line("#%s (state) %s *** %s" %
|
||||||
|
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||||
|
##)
|
||||||
|
|
||||||
|
#if "joinee" in pdu.content:
|
||||||
|
#self._on_join(pdu.context, pdu.content["joinee"])
|
||||||
|
#elif "invitee" in pdu.content:
|
||||||
|
#self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||||
|
|
||||||
|
def _on_message(self, pdu):
|
||||||
|
""" We received a message
|
||||||
|
"""
|
||||||
|
self.output.print_line("#%s %s %s" %
|
||||||
|
(pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||||
|
)
|
||||||
|
|
||||||
|
def _on_join(self, context, joinee):
|
||||||
|
""" Someone has joined a room, either a remote user or a local user
|
||||||
|
"""
|
||||||
|
room = self._get_or_create_room(context)
|
||||||
|
room.add_participant(joinee)
|
||||||
|
|
||||||
|
self.output.print_line("#%s %s %s" %
|
||||||
|
(context, joinee, "*** JOINED")
|
||||||
|
)
|
||||||
|
|
||||||
|
def _on_invite(self, origin, context, invitee):
|
||||||
|
""" Someone has been invited
|
||||||
|
"""
|
||||||
|
room = self._get_or_create_room(context)
|
||||||
|
room.add_invited(invitee)
|
||||||
|
|
||||||
|
self.output.print_line("#%s %s %s" %
|
||||||
|
(context, invitee, "*** INVITED")
|
||||||
|
)
|
||||||
|
|
||||||
|
if not room.have_got_metadata and origin is not self.server_name:
|
||||||
|
logger.debug("Get room state")
|
||||||
|
self.replication_layer.get_state_for_context(origin, context)
|
||||||
|
room.have_got_metadata = True
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def send_message(self, room_name, sender, body):
|
||||||
|
""" Send a message to a room!
|
||||||
|
"""
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield self.replication_layer.send_pdu(
|
||||||
|
Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
pdu_type="sy.room.message",
|
||||||
|
content={"sender": sender, "body": body},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def join_room(self, room_name, sender, joinee):
|
||||||
|
""" Join a room!
|
||||||
|
"""
|
||||||
|
self._on_join(room_name, joinee)
|
||||||
|
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
pdu = Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
pdu_type="sy.room.member",
|
||||||
|
is_state=True,
|
||||||
|
state_key=joinee,
|
||||||
|
content={"membership": "join"},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
yield self.replication_layer.send_pdu(pdu)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def invite_to_room(self, room_name, sender, invitee):
|
||||||
|
""" Invite someone to a room!
|
||||||
|
"""
|
||||||
|
self._on_invite(self.server_name, room_name, invitee)
|
||||||
|
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield self.replication_layer.send_pdu(
|
||||||
|
Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
is_state=True,
|
||||||
|
pdu_type="sy.room.member",
|
||||||
|
state_key=invitee,
|
||||||
|
content={"membership": "invite"},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
def backfill(self, room_name, limit=5):
|
||||||
|
room = self.joined_rooms.get(room_name)
|
||||||
|
|
||||||
|
if not room:
|
||||||
|
return
|
||||||
|
|
||||||
|
dest = room.oldest_server
|
||||||
|
|
||||||
|
return self.replication_layer.backfill(dest, room_name, limit)
|
||||||
|
|
||||||
|
def _get_room_remote_servers(self, room_name):
|
||||||
|
return [i for i in self.joined_rooms.setdefault(room_name,).servers]
|
||||||
|
|
||||||
|
def _get_or_create_room(self, room_name):
|
||||||
|
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||||
|
|
||||||
|
def get_servers_for_context(self, context):
|
||||||
|
return defer.succeed(
|
||||||
|
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def main(stdscr):
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument('user', type=str)
|
||||||
|
parser.add_argument('-v', '--verbose', action='count')
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
user = args.user
|
||||||
|
server_name = origin_from_ucid(user)
|
||||||
|
|
||||||
|
## Set up logging ##
|
||||||
|
|
||||||
|
root_logger = logging.getLogger()
|
||||||
|
|
||||||
|
formatter = logging.Formatter('%(asctime)s - %(name)s - %(lineno)d - '
|
||||||
|
'%(levelname)s - %(message)s')
|
||||||
|
if not os.path.exists("logs"):
|
||||||
|
os.makedirs("logs")
|
||||||
|
fh = logging.FileHandler("logs/%s" % user)
|
||||||
|
fh.setFormatter(formatter)
|
||||||
|
|
||||||
|
root_logger.addHandler(fh)
|
||||||
|
root_logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Hack: The only way to get it to stop logging to sys.stderr :(
|
||||||
|
log.theLogPublisher.observers = []
|
||||||
|
observer = log.PythonLoggingObserver()
|
||||||
|
observer.start()
|
||||||
|
|
||||||
|
## Set up synapse server
|
||||||
|
|
||||||
|
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||||
|
input_output = InputOutput(curses_stdio, user)
|
||||||
|
|
||||||
|
curses_stdio.set_callback(input_output)
|
||||||
|
|
||||||
|
app_hs = SynapseHomeServer(server_name, db_name="dbs/%s" % user)
|
||||||
|
replication = app_hs.get_replication_layer()
|
||||||
|
|
||||||
|
hs = HomeServer(server_name, replication, curses_stdio)
|
||||||
|
|
||||||
|
input_output.set_home_server(hs)
|
||||||
|
|
||||||
|
## Add input_output logger
|
||||||
|
io_logger = IOLoggerHandler(input_output)
|
||||||
|
io_logger.setFormatter(formatter)
|
||||||
|
root_logger.addHandler(io_logger)
|
||||||
|
|
||||||
|
## Start! ##
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = int(server_name.split(":")[1])
|
||||||
|
except:
|
||||||
|
port = 12345
|
||||||
|
|
||||||
|
app_hs.get_http_server().start_listening(port)
|
||||||
|
|
||||||
|
reactor.addReader(curses_stdio)
|
||||||
|
|
||||||
|
reactor.run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
curses.wrapper(main)
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
# Using the Synapse Grafana dashboard
|
|
||||||
|
|
||||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
|
||||||
1. Have your Prometheus scrape your Synapse. https://matrix-org.github.io/synapse/latest/metrics-howto.html
|
|
||||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
|
||||||
3. Set up required recording rules. [contrib/prometheus](../prometheus)
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,30 +12,26 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import urllib.request
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
import pydot
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
|
||||||
def make_name(pdu_id: str, origin: str) -> str:
|
def make_name(pdu_id, origin):
|
||||||
return f"{pdu_id}@{origin}"
|
return "%s@%s" % (pdu_id, origin)
|
||||||
|
|
||||||
|
|
||||||
def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
def make_graph(pdus, room, filename_prefix):
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by querying a homeserver.
|
|
||||||
"""
|
|
||||||
pdu_map = {}
|
pdu_map = {}
|
||||||
node_map = {}
|
node_map = {}
|
||||||
|
|
||||||
origins = set()
|
origins = set()
|
||||||
colors = {"red", "green", "blue", "yellow", "purple"}
|
colors = set(("red", "green", "blue", "yellow", "purple"))
|
||||||
|
|
||||||
for pdu in pdus:
|
for pdu in pdus:
|
||||||
origins.add(pdu.get("origin"))
|
origins.add(pdu.get("origin"))
|
||||||
@@ -51,8 +47,8 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
try:
|
try:
|
||||||
c = colors.pop()
|
c = colors.pop()
|
||||||
color_map[o] = c
|
color_map[o] = c
|
||||||
except Exception:
|
except:
|
||||||
print("Run out of colours!")
|
print "Run out of colours!"
|
||||||
color_map[o] = "black"
|
color_map[o] = "black"
|
||||||
|
|
||||||
graph = pydot.Dot(graph_name="Test")
|
graph = pydot.Dot(graph_name="Test")
|
||||||
@@ -61,9 +57,9 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
|
name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
|
||||||
pdu_map[name] = pdu
|
pdu_map[name] = pdu
|
||||||
|
|
||||||
t = datetime.datetime.fromtimestamp(float(pdu["ts"]) / 1000).strftime(
|
t = datetime.datetime.fromtimestamp(
|
||||||
"%Y-%m-%d %H:%M:%S,%f"
|
float(pdu["ts"]) / 1000
|
||||||
)
|
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||||
|
|
||||||
label = (
|
label = (
|
||||||
"<"
|
"<"
|
||||||
@@ -83,7 +79,11 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
"depth": pdu.get("depth"),
|
"depth": pdu.get("depth"),
|
||||||
}
|
}
|
||||||
|
|
||||||
node = pydot.Node(name=name, label=label, color=color_map[pdu.get("origin")])
|
node = pydot.Node(
|
||||||
|
name=name,
|
||||||
|
label=label,
|
||||||
|
color=color_map[pdu.get("origin")]
|
||||||
|
)
|
||||||
node_map[name] = node
|
node_map[name] = node
|
||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
|
|
||||||
@@ -93,7 +93,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
end_name = make_name(i, o)
|
end_name = make_name(i, o)
|
||||||
|
|
||||||
if end_name not in node_map:
|
if end_name not in node_map:
|
||||||
print("%s not in nodes" % end_name)
|
print "%s not in nodes" % end_name
|
||||||
continue
|
continue
|
||||||
|
|
||||||
edge = pydot.Edge(node_map[start_name], node_map[end_name])
|
edge = pydot.Edge(node_map[start_name], node_map[end_name])
|
||||||
@@ -107,19 +107,20 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
|
|
||||||
if prev_state_name in node_map:
|
if prev_state_name in node_map:
|
||||||
state_edge = pydot.Edge(
|
state_edge = pydot.Edge(
|
||||||
node_map[start_name], node_map[prev_state_name], style="dotted"
|
node_map[start_name], node_map[prev_state_name],
|
||||||
|
style='dotted'
|
||||||
)
|
)
|
||||||
graph.add_edge(state_edge)
|
graph.add_edge(state_edge)
|
||||||
|
|
||||||
graph.write("%s.dot" % filename_prefix, format="raw", prog="dot")
|
graph.write('%s.dot' % filename_prefix, format='raw', prog='dot')
|
||||||
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
||||||
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
graph.write_svg("%s.svg" % filename_prefix, prog='dot')
|
||||||
|
|
||||||
|
|
||||||
def get_pdus(host: str, room: str) -> List[dict]:
|
def get_pdus(host, room):
|
||||||
transaction = json.loads(
|
transaction = json.loads(
|
||||||
urllib.request.urlopen(
|
urllib2.urlopen(
|
||||||
f"http://{host}/_matrix/federation/v1/context/{room}/"
|
"http://%s/_matrix/federation/v1/context/%s/" % (host, room)
|
||||||
).read()
|
).read()
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -129,14 +130,15 @@ def get_pdus(host: str, room: str) -> List[dict]:
|
|||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by talking "
|
description="Generate a PDU graph for a given room by talking "
|
||||||
"to the given homeserver to get the list of PDUs. \n"
|
"to the given homeserver to get the list of PDUs. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-p", "--prefix", dest="prefix", help="String to prefix output files with"
|
"-p", "--prefix", dest="prefix",
|
||||||
|
help="String to prefix output files with"
|
||||||
)
|
)
|
||||||
parser.add_argument("host")
|
parser.add_argument('host')
|
||||||
parser.add_argument("room")
|
parser.add_argument('room')
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
@@ -146,4 +148,4 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
pdus = get_pdus(host, room)
|
pdus = get_pdus(host, room)
|
||||||
|
|
||||||
make_graph(pdus, prefix)
|
make_graph(pdus, room, prefix)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -13,32 +13,22 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import datetime
|
|
||||||
import html
|
|
||||||
import json
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
import pydot
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
from synapse.events import FrozenEvent
|
||||||
from synapse.events import make_event_from_dict
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
|
|
||||||
def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None:
|
def make_graph(db_name, room_id, file_prefix, limit):
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by reading from a Synapse SQLite database.
|
|
||||||
"""
|
|
||||||
conn = sqlite3.connect(db_name)
|
conn = sqlite3.connect(db_name)
|
||||||
|
|
||||||
sql = "SELECT room_version FROM rooms WHERE room_id = ?"
|
|
||||||
c = conn.execute(sql, (room_id,))
|
|
||||||
room_version = KNOWN_ROOM_VERSIONS[c.fetchone()[0]]
|
|
||||||
|
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT json, internal_metadata FROM event_json as j "
|
"SELECT json FROM event_json as j "
|
||||||
"INNER JOIN events as e ON e.event_id = j.event_id "
|
"INNER JOIN events as e ON e.event_id = j.event_id "
|
||||||
"WHERE j.room_id = ?"
|
"WHERE j.room_id = ?"
|
||||||
)
|
)
|
||||||
@@ -46,16 +36,16 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
args = [room_id]
|
args = [room_id]
|
||||||
|
|
||||||
if limit:
|
if limit:
|
||||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
|
sql += (
|
||||||
|
" ORDER BY topological_ordering DESC, stream_ordering DESC "
|
||||||
|
"LIMIT ?"
|
||||||
|
)
|
||||||
|
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
c = conn.execute(sql, args)
|
c = conn.execute(sql, args)
|
||||||
|
|
||||||
events = [
|
events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
|
||||||
make_event_from_dict(json.loads(e[0]), room_version, json.loads(e[1]))
|
|
||||||
for e in c.fetchall()
|
|
||||||
]
|
|
||||||
|
|
||||||
events.sort(key=lambda e: e.depth)
|
events.sort(key=lambda e: e.depth)
|
||||||
|
|
||||||
@@ -66,8 +56,9 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
c = conn.execute(
|
c = conn.execute(
|
||||||
"SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
|
"SELECT state_group FROM event_to_state_groups "
|
||||||
(event.event_id,),
|
"WHERE event_id = ?",
|
||||||
|
(event.event_id,)
|
||||||
)
|
)
|
||||||
|
|
||||||
res = c.fetchone()
|
res = c.fetchone()
|
||||||
@@ -78,7 +69,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
|
|
||||||
t = datetime.datetime.fromtimestamp(
|
t = datetime.datetime.fromtimestamp(
|
||||||
float(event.origin_server_ts) / 1000
|
float(event.origin_server_ts) / 1000
|
||||||
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||||
|
|
||||||
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
||||||
|
|
||||||
@@ -96,23 +87,29 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
"name": event.event_id,
|
"name": event.event_id,
|
||||||
"type": event.type,
|
"type": event.type,
|
||||||
"state_key": event.get("state_key", None),
|
"state_key": event.get("state_key", None),
|
||||||
"content": html.escape(content, quote=True),
|
"content": cgi.escape(content, quote=True),
|
||||||
"time": t,
|
"time": t,
|
||||||
"depth": event.depth,
|
"depth": event.depth,
|
||||||
"state_group": state_group,
|
"state_group": state_group,
|
||||||
}
|
}
|
||||||
|
|
||||||
node = pydot.Node(name=event.event_id, label=label)
|
node = pydot.Node(
|
||||||
|
name=event.event_id,
|
||||||
|
label=label,
|
||||||
|
)
|
||||||
|
|
||||||
node_map[event.event_id] = node
|
node_map[event.event_id] = node
|
||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
for prev_id in event.prev_event_ids():
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except:
|
||||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
end_node = pydot.Node(
|
||||||
|
name=prev_id,
|
||||||
|
label="<<b>%s</b>>" % (prev_id,),
|
||||||
|
)
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
graph.add_node(end_node)
|
graph.add_node(end_node)
|
||||||
@@ -124,33 +121,36 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
if len(event_ids) <= 1:
|
if len(event_ids) <= 1:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cluster = pydot.Cluster(str(group), label=f"<State Group: {str(group)}>")
|
cluster = pydot.Cluster(
|
||||||
|
str(group),
|
||||||
|
label="<State Group: %s>" % (str(group),)
|
||||||
|
)
|
||||||
|
|
||||||
for event_id in event_ids:
|
for event_id in event_ids:
|
||||||
cluster.add_node(node_map[event_id])
|
cluster.add_node(node_map[event_id])
|
||||||
|
|
||||||
graph.add_subgraph(cluster)
|
graph.add_subgraph(cluster)
|
||||||
|
|
||||||
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
||||||
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by talking "
|
description="Generate a PDU graph for a given room by talking "
|
||||||
"to the given Synapse SQLite file to get the list of PDUs. \n"
|
"to the given homeserver to get the list of PDUs. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-p",
|
"-p", "--prefix", dest="prefix",
|
||||||
"--prefix",
|
|
||||||
dest="prefix",
|
|
||||||
help="String to prefix output files with",
|
help="String to prefix output files with",
|
||||||
default="graph_output",
|
default="graph_output"
|
||||||
)
|
)
|
||||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
parser.add_argument(
|
||||||
parser.add_argument("db")
|
"-l", "--limit",
|
||||||
parser.add_argument("room")
|
help="Only retrieve the last N events.",
|
||||||
|
)
|
||||||
|
parser.add_argument('db')
|
||||||
|
parser.add_argument('room')
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
|||||||
@@ -1,155 +0,0 @@
|
|||||||
# Copyright 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import datetime
|
|
||||||
import html
|
|
||||||
import json
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
|
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
|
||||||
from synapse.events import make_event_from_dict
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
|
||||||
|
|
||||||
|
|
||||||
def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by reading line-delimited JSON from a file.
|
|
||||||
"""
|
|
||||||
print("Reading lines")
|
|
||||||
with open(file_name) as f:
|
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
print("Read lines")
|
|
||||||
|
|
||||||
# Figure out the room version, assume the first line is the create event.
|
|
||||||
room_version = KNOWN_ROOM_VERSIONS[
|
|
||||||
json.loads(lines[0]).get("content", {}).get("room_version")
|
|
||||||
]
|
|
||||||
|
|
||||||
events = [make_event_from_dict(json.loads(line), room_version) for line in lines]
|
|
||||||
|
|
||||||
print("Loaded events.")
|
|
||||||
|
|
||||||
events.sort(key=lambda e: e.depth)
|
|
||||||
|
|
||||||
print("Sorted events")
|
|
||||||
|
|
||||||
if limit:
|
|
||||||
events = events[-int(limit) :]
|
|
||||||
|
|
||||||
node_map = {}
|
|
||||||
|
|
||||||
graph = pydot.Dot(graph_name="Test")
|
|
||||||
|
|
||||||
for event in events:
|
|
||||||
t = datetime.datetime.fromtimestamp(
|
|
||||||
float(event.origin_server_ts) / 1000
|
|
||||||
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
|
||||||
|
|
||||||
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
|
||||||
content = content.replace("\n", "<br/>\n")
|
|
||||||
|
|
||||||
print(content)
|
|
||||||
content = []
|
|
||||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
|
||||||
if value is None:
|
|
||||||
value = "<null>"
|
|
||||||
elif isinstance(value, str):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
value = json.dumps(value)
|
|
||||||
|
|
||||||
content.append(
|
|
||||||
"<b>%s</b>: %s,"
|
|
||||||
% (
|
|
||||||
html.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
|
||||||
html.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
content = "<br/>\n".join(content)
|
|
||||||
|
|
||||||
print(content)
|
|
||||||
|
|
||||||
label = (
|
|
||||||
"<"
|
|
||||||
"<b>%(name)s </b><br/>"
|
|
||||||
"Type: <b>%(type)s </b><br/>"
|
|
||||||
"State key: <b>%(state_key)s </b><br/>"
|
|
||||||
"Content: <b>%(content)s </b><br/>"
|
|
||||||
"Time: <b>%(time)s </b><br/>"
|
|
||||||
"Depth: <b>%(depth)s </b><br/>"
|
|
||||||
">"
|
|
||||||
) % {
|
|
||||||
"name": event.event_id,
|
|
||||||
"type": event.type,
|
|
||||||
"state_key": event.get("state_key", None),
|
|
||||||
"content": content,
|
|
||||||
"time": t,
|
|
||||||
"depth": event.depth,
|
|
||||||
}
|
|
||||||
|
|
||||||
node = pydot.Node(name=event.event_id, label=label)
|
|
||||||
|
|
||||||
node_map[event.event_id] = node
|
|
||||||
graph.add_node(node)
|
|
||||||
|
|
||||||
print("Created Nodes")
|
|
||||||
|
|
||||||
for event in events:
|
|
||||||
for prev_id in event.prev_event_ids():
|
|
||||||
try:
|
|
||||||
end_node = node_map[prev_id]
|
|
||||||
except Exception:
|
|
||||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
|
||||||
graph.add_node(end_node)
|
|
||||||
|
|
||||||
edge = pydot.Edge(node_map[event.event_id], end_node)
|
|
||||||
graph.add_edge(edge)
|
|
||||||
|
|
||||||
print("Created edges")
|
|
||||||
|
|
||||||
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
|
||||||
|
|
||||||
print("Created Dot")
|
|
||||||
|
|
||||||
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
|
||||||
|
|
||||||
print("Created svg")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description="Generate a PDU graph for a given room by reading "
|
|
||||||
"from a file with line deliminated events. \n"
|
|
||||||
"Requires pydot."
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"-p",
|
|
||||||
"--prefix",
|
|
||||||
dest="prefix",
|
|
||||||
help="String to prefix output files with",
|
|
||||||
default="graph_output",
|
|
||||||
)
|
|
||||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
|
||||||
parser.add_argument("event_file")
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
make_graph(args.event_file, args.prefix, args.limit)
|
|
||||||
260
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
260
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
"""
|
||||||
|
This is an attempt at bridging matrix clients into a Jitis meet room via Matrix
|
||||||
|
video call. It uses hard-coded xml strings overg XMPP BOSH. It can display one
|
||||||
|
of the streams from the Jitsi bridge until the second lot of SDP comes down and
|
||||||
|
we set the remote SDP at which point the stream ends. Our video never gets to
|
||||||
|
the bridge.
|
||||||
|
|
||||||
|
Requires:
|
||||||
|
npm install jquery jsdom
|
||||||
|
"""
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
import grequests
|
||||||
|
from BeautifulSoup import BeautifulSoup
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
|
#ACCESS_TOKEN="" #
|
||||||
|
|
||||||
|
MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
|
||||||
|
MYUSERNAME = '@davetest:matrix.org'
|
||||||
|
|
||||||
|
HTTPBIND = 'https://meet.jit.si/http-bind'
|
||||||
|
#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||||
|
#ROOMNAME = "matrix"
|
||||||
|
ROOMNAME = "pibble"
|
||||||
|
|
||||||
|
HOST="guest.jit.si"
|
||||||
|
#HOST="jitsi.vuc.me"
|
||||||
|
|
||||||
|
TURNSERVER="turn.guest.jit.si"
|
||||||
|
#TURNSERVER="turn.jitsi.vuc.me"
|
||||||
|
|
||||||
|
ROOMDOMAIN="meet.jit.si"
|
||||||
|
#ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||||
|
|
||||||
|
class TrivialMatrixClient:
|
||||||
|
def __init__(self, access_token):
|
||||||
|
self.token = None
|
||||||
|
self.access_token = access_token
|
||||||
|
|
||||||
|
def getEvent(self):
|
||||||
|
while True:
|
||||||
|
url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
|
||||||
|
if self.token:
|
||||||
|
url += "&from="+self.token
|
||||||
|
req = grequests.get(url)
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print "incoming from matrix",obj
|
||||||
|
if 'end' not in obj:
|
||||||
|
continue
|
||||||
|
self.token = obj['end']
|
||||||
|
if len(obj['chunk']):
|
||||||
|
return obj['chunk'][0]
|
||||||
|
|
||||||
|
def joinRoom(self, roomId):
|
||||||
|
url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
|
||||||
|
print url
|
||||||
|
headers={ 'Content-Type': 'application/json' }
|
||||||
|
req = grequests.post(url, headers=headers, data='{}')
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print "response: ",obj
|
||||||
|
|
||||||
|
def sendEvent(self, roomId, evType, event):
|
||||||
|
url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
|
||||||
|
print url
|
||||||
|
print json.dumps(event)
|
||||||
|
headers={ 'Content-Type': 'application/json' }
|
||||||
|
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print "response: ",obj
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
xmppClients = {}
|
||||||
|
|
||||||
|
|
||||||
|
def matrixLoop():
|
||||||
|
while True:
|
||||||
|
ev = matrixCli.getEvent()
|
||||||
|
print ev
|
||||||
|
if ev['type'] == 'm.room.member':
|
||||||
|
print 'membership event'
|
||||||
|
if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
|
||||||
|
roomId = ev['room_id']
|
||||||
|
print "joining room %s" % (roomId)
|
||||||
|
matrixCli.joinRoom(roomId)
|
||||||
|
elif ev['type'] == 'm.room.message':
|
||||||
|
if ev['room_id'] in xmppClients:
|
||||||
|
print "already have a bridge for that user, ignoring"
|
||||||
|
continue
|
||||||
|
print "got message, connecting"
|
||||||
|
xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||||
|
gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||||
|
elif ev['type'] == 'm.call.invite':
|
||||||
|
print "Incoming call"
|
||||||
|
#sdp = ev['content']['offer']['sdp']
|
||||||
|
#print "sdp: %s" % (sdp)
|
||||||
|
#xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||||
|
#gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||||
|
elif ev['type'] == 'm.call.answer':
|
||||||
|
print "Call answered"
|
||||||
|
sdp = ev['content']['answer']['sdp']
|
||||||
|
if ev['room_id'] not in xmppClients:
|
||||||
|
print "We didn't have a call for that room"
|
||||||
|
continue
|
||||||
|
# should probably check call ID too
|
||||||
|
xmppCli = xmppClients[ev['room_id']]
|
||||||
|
xmppCli.sendAnswer(sdp)
|
||||||
|
elif ev['type'] == 'm.call.hangup':
|
||||||
|
if ev['room_id'] in xmppClients:
|
||||||
|
xmppClients[ev['room_id']].stop()
|
||||||
|
del xmppClients[ev['room_id']]
|
||||||
|
|
||||||
|
class TrivialXmppClient:
|
||||||
|
def __init__(self, matrixRoom, userId):
|
||||||
|
self.rid = 0
|
||||||
|
self.matrixRoom = matrixRoom
|
||||||
|
self.userId = userId
|
||||||
|
self.running = True
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.running = False
|
||||||
|
|
||||||
|
def nextRid(self):
|
||||||
|
self.rid += 1
|
||||||
|
return '%d' % (self.rid)
|
||||||
|
|
||||||
|
def sendIq(self, xml):
|
||||||
|
fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
|
||||||
|
#print "\t>>>%s" % (fullXml)
|
||||||
|
return self.xmppPoke(fullXml)
|
||||||
|
|
||||||
|
def xmppPoke(self, xml):
|
||||||
|
headers = {'Content-Type': 'application/xml'}
|
||||||
|
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = BeautifulSoup(resps[0].content)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def sendAnswer(self, answer):
|
||||||
|
print "sdp from matrix client",answer
|
||||||
|
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||||
|
jingle, out_err = p.communicate(answer)
|
||||||
|
jingle = jingle % {
|
||||||
|
'tojid': self.callfrom,
|
||||||
|
'action': 'session-accept',
|
||||||
|
'initiator': self.callfrom,
|
||||||
|
'responder': self.jid,
|
||||||
|
'sid': self.callsid
|
||||||
|
}
|
||||||
|
print "answer jingle from sdp",jingle
|
||||||
|
res = self.sendIq(jingle)
|
||||||
|
print "reply from answer: ",res
|
||||||
|
|
||||||
|
self.ssrcs = {}
|
||||||
|
jingleSoup = BeautifulSoup(jingle)
|
||||||
|
for cont in jingleSoup.iq.jingle.findAll('content'):
|
||||||
|
if cont.description:
|
||||||
|
self.ssrcs[cont['name']] = cont.description['ssrc']
|
||||||
|
print "my ssrcs:",self.ssrcs
|
||||||
|
|
||||||
|
gevent.joinall([
|
||||||
|
gevent.spawn(self.advertiseSsrcs)
|
||||||
|
])
|
||||||
|
|
||||||
|
def advertiseSsrcs(self):
|
||||||
|
time.sleep(7)
|
||||||
|
print "SSRC spammer started"
|
||||||
|
while self.running:
|
||||||
|
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
|
||||||
|
res = self.sendIq(ssrcMsg)
|
||||||
|
print "reply from ssrc announce: ",res
|
||||||
|
time.sleep(10)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def xmppLoop(self):
|
||||||
|
self.matrixCallId = time.time()
|
||||||
|
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
|
||||||
|
|
||||||
|
print res
|
||||||
|
self.sid = res.body['sid']
|
||||||
|
print "sid %s" % (self.sid)
|
||||||
|
|
||||||
|
res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
|
||||||
|
|
||||||
|
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
|
||||||
|
|
||||||
|
res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
|
||||||
|
print res
|
||||||
|
|
||||||
|
self.jid = res.body.iq.bind.jid.string
|
||||||
|
print "jid: %s" % (self.jid)
|
||||||
|
self.shortJid = self.jid.split('-')[0]
|
||||||
|
|
||||||
|
res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
|
||||||
|
|
||||||
|
#randomthing = res.body.iq['to']
|
||||||
|
#whatsitpart = randomthing.split('-')[0]
|
||||||
|
|
||||||
|
#print "other random bind thing: %s" % (randomthing)
|
||||||
|
|
||||||
|
# advertise preence to the jitsi room, with our nick
|
||||||
|
res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
|
||||||
|
self.muc = {'users': []}
|
||||||
|
for p in res.body.findAll('presence'):
|
||||||
|
u = {}
|
||||||
|
u['shortJid'] = p['from'].split('/')[1]
|
||||||
|
if p.c and p.c.nick:
|
||||||
|
u['nick'] = p.c.nick.string
|
||||||
|
self.muc['users'].append(u)
|
||||||
|
print "muc: ",self.muc
|
||||||
|
|
||||||
|
# wait for stuff
|
||||||
|
while True:
|
||||||
|
print "waiting..."
|
||||||
|
res = self.sendIq("")
|
||||||
|
print "got from stream: ",res
|
||||||
|
if res.body.iq:
|
||||||
|
jingles = res.body.iq.findAll('jingle')
|
||||||
|
if len(jingles):
|
||||||
|
self.callfrom = res.body.iq['from']
|
||||||
|
self.handleInvite(jingles[0])
|
||||||
|
elif 'type' in res.body and res.body['type'] == 'terminate':
|
||||||
|
self.running = False
|
||||||
|
del xmppClients[self.matrixRoom]
|
||||||
|
return
|
||||||
|
|
||||||
|
def handleInvite(self, jingle):
|
||||||
|
self.initiator = jingle['initiator']
|
||||||
|
self.callsid = jingle['sid']
|
||||||
|
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||||
|
print "raw jingle invite",str(jingle)
|
||||||
|
sdp, out_err = p.communicate(str(jingle))
|
||||||
|
print "transformed remote offer sdp",sdp
|
||||||
|
inviteEvent = {
|
||||||
|
'offer': {
|
||||||
|
'type': 'offer',
|
||||||
|
'sdp': sdp
|
||||||
|
},
|
||||||
|
'call_id': self.matrixCallId,
|
||||||
|
'version': 0,
|
||||||
|
'lifetime': 30000
|
||||||
|
}
|
||||||
|
matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
|
||||||
|
|
||||||
|
matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
|
||||||
|
|
||||||
|
gevent.joinall([
|
||||||
|
gevent.spawn(matrixLoop)
|
||||||
|
])
|
||||||
|
|
||||||
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
index 9fbfff0..dc68077 100644
|
||||||
|
--- a/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
@@ -16,6 +16,45 @@ limitations under the License.
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
+
|
||||||
|
+function sendKeyframe(pc) {
|
||||||
|
+ console.log('sendkeyframe', pc.iceConnectionState);
|
||||||
|
+ if (pc.iceConnectionState !== 'connected') return; // safe...
|
||||||
|
+ pc.setRemoteDescription(
|
||||||
|
+ pc.remoteDescription,
|
||||||
|
+ function () {
|
||||||
|
+ pc.createAnswer(
|
||||||
|
+ function (modifiedAnswer) {
|
||||||
|
+ pc.setLocalDescription(
|
||||||
|
+ modifiedAnswer,
|
||||||
|
+ function () {
|
||||||
|
+ // noop
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe setLocalDescription failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe createAnswer failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe setRemoteDescription failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
var forAllVideoTracksOnStream = function(s, f) {
|
||||||
|
var tracks = s.getVideoTracks();
|
||||||
|
for (var i = 0; i < tracks.length; i++) {
|
||||||
|
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: we should prevent any calls from being placed or accepted before this has finished
|
||||||
|
- MatrixCall.getTurnServer();
|
||||||
|
+ //MatrixCall.getTurnServer();
|
||||||
|
|
||||||
|
MatrixCall.CALL_TIMEOUT = 60000;
|
||||||
|
MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
|
||||||
|
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
|
||||||
|
pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
|
||||||
|
pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
|
||||||
|
pc.onaddstream = function(s) { self.onAddStream(s); };
|
||||||
|
+
|
||||||
|
+ var datachan = pc.createDataChannel('RTCDataChannel', {
|
||||||
|
+ reliable: false
|
||||||
|
+ });
|
||||||
|
+ console.log("data chan: "+datachan);
|
||||||
|
+ datachan.onopen = function() {
|
||||||
|
+ console.log("data channel open");
|
||||||
|
+ };
|
||||||
|
+ datachan.onmessage = function() {
|
||||||
|
+ console.log("data channel message");
|
||||||
|
+ };
|
||||||
|
+ pc.ondatachannel = function(event) {
|
||||||
|
+ console.log("have data channel");
|
||||||
|
+ event.channel.binaryType = 'blob';
|
||||||
|
+ };
|
||||||
|
+
|
||||||
|
return pc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
|
||||||
|
}, this.msg.lifetime - event.age);
|
||||||
|
};
|
||||||
|
|
||||||
|
+ MatrixCall.prototype.receivedInvite = function(event) {
|
||||||
|
+ console.log("Got second invite for call "+this.call_id);
|
||||||
|
+ this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
|
||||||
|
+ };
|
||||||
|
+
|
||||||
|
+
|
||||||
|
// perverse as it may seem, sometimes we want to instantiate a call with a hangup message
|
||||||
|
// (because when getting the state of the room on load, events come in reverse order and
|
||||||
|
// we want to remember that a call has been hung up)
|
||||||
|
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
|
||||||
|
'mandatory': {
|
||||||
|
'OfferToReceiveAudio': true,
|
||||||
|
'OfferToReceiveVideo': this.type == 'video'
|
||||||
|
- },
|
||||||
|
+ }
|
||||||
|
};
|
||||||
|
this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
|
||||||
|
// This can't be in an apply() because it's called by a predecessor call under glare conditions :(
|
||||||
|
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
|
||||||
|
MatrixCall.prototype.gotLocalIceCandidate = function(event) {
|
||||||
|
if (event.candidate) {
|
||||||
|
console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
|
||||||
|
- this.sendCandidate(event.candidate);
|
||||||
|
- }
|
||||||
|
+ //this.sendCandidate(event.candidate);
|
||||||
|
+ } else {
|
||||||
|
+ console.log("have all candidates, sending answer");
|
||||||
|
+ var content = {
|
||||||
|
+ version: 0,
|
||||||
|
+ call_id: this.call_id,
|
||||||
|
+ answer: this.peerConn.localDescription
|
||||||
|
+ };
|
||||||
|
+ this.sendEventWithRetry('m.call.answer', content);
|
||||||
|
+ var self = this;
|
||||||
|
+ $rootScope.$apply(function() {
|
||||||
|
+ self.state = 'connecting';
|
||||||
|
+ });
|
||||||
|
+ }
|
||||||
|
}
|
||||||
|
|
||||||
|
MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
|
||||||
|
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
|
||||||
|
console.log("Created answer: "+description);
|
||||||
|
var self = this;
|
||||||
|
this.peerConn.setLocalDescription(description, function() {
|
||||||
|
- var content = {
|
||||||
|
- version: 0,
|
||||||
|
- call_id: self.call_id,
|
||||||
|
- answer: self.peerConn.localDescription
|
||||||
|
- };
|
||||||
|
- self.sendEventWithRetry('m.call.answer', content);
|
||||||
|
- $rootScope.$apply(function() {
|
||||||
|
- self.state = 'connecting';
|
||||||
|
- });
|
||||||
|
}, function() { console.log("Error setting local description!"); } );
|
||||||
|
};
|
||||||
|
|
||||||
|
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
|
||||||
|
$rootScope.$apply(function() {
|
||||||
|
self.state = 'connected';
|
||||||
|
self.didConnect = true;
|
||||||
|
+ /*$timeout(function() {
|
||||||
|
+ sendKeyframe(self.peerConn);
|
||||||
|
+ }, 1000);*/
|
||||||
|
});
|
||||||
|
} else if (this.peerConn.iceConnectionState == 'failed') {
|
||||||
|
this.hangup('ice_failed');
|
||||||
|
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
|
||||||
|
|
||||||
|
MatrixCall.prototype.onRemoteStreamEnded = function(event) {
|
||||||
|
console.log("Remote stream ended");
|
||||||
|
+ return;
|
||||||
|
var self = this;
|
||||||
|
$rootScope.$apply(function() {
|
||||||
|
self.state = 'ended';
|
||||||
|
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
index 55dbbf5..272fa27 100644
|
||||||
|
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
+ // do we already have an entry for this call ID?
|
||||||
|
+ var existingEntry = matrixPhoneService.allCalls[msg.call_id];
|
||||||
|
+ if (existingEntry) {
|
||||||
|
+ existingEntry.receivedInvite(msg);
|
||||||
|
+ return;
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
var call = undefined;
|
||||||
|
if (!isLive) {
|
||||||
|
// if this event wasn't live then this call may already be over
|
||||||
|
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
|
||||||
|
call.hangup();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
- $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||||
|
+ $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||||
|
}
|
||||||
|
} else if (event.type == 'm.call.answer') {
|
||||||
|
var call = matrixPhoneService.allCalls[msg.call_id];
|
||||||
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
@@ -0,0 +1,712 @@
|
|||||||
|
/* jshint -W117 */
|
||||||
|
// SDP STUFF
|
||||||
|
function SDP(sdp) {
|
||||||
|
this.media = sdp.split('\r\nm=');
|
||||||
|
for (var i = 1; i < this.media.length; i++) {
|
||||||
|
this.media[i] = 'm=' + this.media[i];
|
||||||
|
if (i != this.media.length - 1) {
|
||||||
|
this.media[i] += '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.session = this.media.shift() + '\r\n';
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SDP = SDP;
|
||||||
|
|
||||||
|
var jsdom = require("jsdom");
|
||||||
|
var window = jsdom.jsdom().parentWindow;
|
||||||
|
var $ = require('jquery')(window);
|
||||||
|
|
||||||
|
var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns map of MediaChannel mapped per channel idx.
|
||||||
|
*/
|
||||||
|
SDP.prototype.getMediaSsrcMap = function() {
|
||||||
|
var self = this;
|
||||||
|
var media_ssrcs = {};
|
||||||
|
for (channelNum = 0; channelNum < self.media.length; channelNum++) {
|
||||||
|
modified = true;
|
||||||
|
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
|
||||||
|
var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
|
||||||
|
var channel = new MediaChannel(channelNum, type);
|
||||||
|
media_ssrcs[channelNum] = channel;
|
||||||
|
tmp.forEach(function (line) {
|
||||||
|
var linessrc = line.substring(7).split(' ')[0];
|
||||||
|
// allocate new ChannelSsrc
|
||||||
|
if(!channel.ssrcs[linessrc]) {
|
||||||
|
channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
|
||||||
|
}
|
||||||
|
channel.ssrcs[linessrc].lines.push(line);
|
||||||
|
});
|
||||||
|
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
|
||||||
|
tmp.forEach(function(line){
|
||||||
|
var semantics = line.substr(0, idx).substr(13);
|
||||||
|
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
|
||||||
|
channel.ssrcGroups.push(ssrcGroup);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return media_ssrcs;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns <tt>true</tt> if this SDP contains given SSRC.
|
||||||
|
* @param ssrc the ssrc to check.
|
||||||
|
* @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
|
||||||
|
*/
|
||||||
|
SDP.prototype.containsSSRC = function(ssrc) {
|
||||||
|
var channels = this.getMediaSsrcMap();
|
||||||
|
var contains = false;
|
||||||
|
Object.keys(channels).forEach(function(chNumber){
|
||||||
|
var channel = channels[chNumber];
|
||||||
|
//console.log("Check", channel, ssrc);
|
||||||
|
if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
|
||||||
|
contains = true;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return contains;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
|
||||||
|
* @param otherSdp the other SDP to check ssrc with.
|
||||||
|
*/
|
||||||
|
SDP.prototype.getNewMedia = function(otherSdp) {
|
||||||
|
|
||||||
|
// this could be useful in Array.prototype.
|
||||||
|
function arrayEquals(array) {
|
||||||
|
// if the other array is a falsy value, return
|
||||||
|
if (!array)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
// compare lengths - can save a lot of time
|
||||||
|
if (this.length != array.length)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
for (var i = 0, l=this.length; i < l; i++) {
|
||||||
|
// Check if we have nested arrays
|
||||||
|
if (this[i] instanceof Array && array[i] instanceof Array) {
|
||||||
|
// recurse into the nested arrays
|
||||||
|
if (!this[i].equals(array[i]))
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
else if (this[i] != array[i]) {
|
||||||
|
// Warning - two different object instances will never be equal: {x:20} != {x:20}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var myMedia = this.getMediaSsrcMap();
|
||||||
|
var othersMedia = otherSdp.getMediaSsrcMap();
|
||||||
|
var newMedia = {};
|
||||||
|
Object.keys(othersMedia).forEach(function(channelNum) {
|
||||||
|
var myChannel = myMedia[channelNum];
|
||||||
|
var othersChannel = othersMedia[channelNum];
|
||||||
|
if(!myChannel && othersChannel) {
|
||||||
|
// Add whole channel
|
||||||
|
newMedia[channelNum] = othersChannel;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Look for new ssrcs accross the channel
|
||||||
|
Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
|
||||||
|
if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
|
||||||
|
// Allocate channel if we've found ssrc that doesn't exist in our channel
|
||||||
|
if(!newMedia[channelNum]){
|
||||||
|
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||||
|
}
|
||||||
|
newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Look for new ssrc groups across the channels
|
||||||
|
othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){
|
||||||
|
|
||||||
|
// try to match the other ssrc-group with an ssrc-group of ours
|
||||||
|
var matched = false;
|
||||||
|
for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
|
||||||
|
var mySsrcGroup = myChannel.ssrcGroups[i];
|
||||||
|
if (otherSsrcGroup.semantics == mySsrcGroup.semantics
|
||||||
|
&& arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
|
||||||
|
|
||||||
|
matched = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!matched) {
|
||||||
|
// Allocate channel if we've found an ssrc-group that doesn't
|
||||||
|
// exist in our channel
|
||||||
|
|
||||||
|
if(!newMedia[channelNum]){
|
||||||
|
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||||
|
}
|
||||||
|
newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return newMedia;
|
||||||
|
};
|
||||||
|
|
||||||
|
// remove iSAC and CN from SDP
|
||||||
|
SDP.prototype.mangle = function () {
|
||||||
|
var i, j, mline, lines, rtpmap, newdesc;
|
||||||
|
for (i = 0; i < this.media.length; i++) {
|
||||||
|
lines = this.media[i].split('\r\n');
|
||||||
|
lines.pop(); // remove empty last element
|
||||||
|
mline = SDPUtil.parse_mline(lines.shift());
|
||||||
|
if (mline.media != 'audio')
|
||||||
|
continue;
|
||||||
|
newdesc = '';
|
||||||
|
mline.fmt.length = 0;
|
||||||
|
for (j = 0; j < lines.length; j++) {
|
||||||
|
if (lines[j].substr(0, 9) == 'a=rtpmap:') {
|
||||||
|
rtpmap = SDPUtil.parse_rtpmap(lines[j]);
|
||||||
|
if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
|
||||||
|
continue;
|
||||||
|
mline.fmt.push(rtpmap.id);
|
||||||
|
newdesc += lines[j] + '\r\n';
|
||||||
|
} else {
|
||||||
|
newdesc += lines[j] + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
|
||||||
|
this.media[i] += newdesc;
|
||||||
|
}
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
};
|
||||||
|
|
||||||
|
// remove lines matching prefix from session section
|
||||||
|
SDP.prototype.removeSessionLines = function(prefix) {
|
||||||
|
var self = this;
|
||||||
|
var lines = SDPUtil.find_lines(this.session, prefix);
|
||||||
|
lines.forEach(function(line) {
|
||||||
|
self.session = self.session.replace(line + '\r\n', '');
|
||||||
|
});
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
return lines;
|
||||||
|
}
|
||||||
|
// remove lines matching prefix from a media section specified by mediaindex
|
||||||
|
// TODO: non-numeric mediaindex could match mid
|
||||||
|
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
|
||||||
|
var self = this;
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
|
||||||
|
lines.forEach(function(line) {
|
||||||
|
self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
|
||||||
|
});
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
return lines;
|
||||||
|
}
|
||||||
|
|
||||||
|
// add content's to a jingle element
|
||||||
|
SDP.prototype.toJingle = function (elem, thecreator) {
|
||||||
|
var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
|
||||||
|
var self = this;
|
||||||
|
// new bundle plan
|
||||||
|
if (SDPUtil.find_line(this.session, 'a=group:')) {
|
||||||
|
lines = SDPUtil.find_lines(this.session, 'a=group:');
|
||||||
|
for (i = 0; i < lines.length; i++) {
|
||||||
|
tmp = lines[i].split(' ');
|
||||||
|
var semantics = tmp.shift().substr(8);
|
||||||
|
elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
|
||||||
|
for (j = 0; j < tmp.length; j++) {
|
||||||
|
elem.c('content', {name: tmp[j]}).up();
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// old bundle plan, to be removed
|
||||||
|
var bundle = [];
|
||||||
|
if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
|
||||||
|
bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
|
||||||
|
bundle.shift();
|
||||||
|
}
|
||||||
|
for (i = 0; i < this.media.length; i++) {
|
||||||
|
mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
|
||||||
|
if (!(mline.media === 'audio' ||
|
||||||
|
mline.media === 'video' ||
|
||||||
|
mline.media === 'application'))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
|
||||||
|
ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
|
||||||
|
} else {
|
||||||
|
ssrc = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
elem.c('content', {creator: thecreator, name: mline.media});
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
|
||||||
|
// prefer identifier from a=mid if present
|
||||||
|
var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
|
||||||
|
elem.attrs({ name: mid });
|
||||||
|
|
||||||
|
// old BUNDLE plan, to be removed
|
||||||
|
if (bundle.indexOf(mid) !== -1) {
|
||||||
|
elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
|
||||||
|
bundle.splice(bundle.indexOf(mid), 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
|
||||||
|
{
|
||||||
|
elem.c('description',
|
||||||
|
{xmlns: 'urn:xmpp:jingle:apps:rtp:1',
|
||||||
|
media: mline.media });
|
||||||
|
if (ssrc) {
|
||||||
|
elem.attrs({ssrc: ssrc});
|
||||||
|
}
|
||||||
|
for (j = 0; j < mline.fmt.length; j++) {
|
||||||
|
rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
|
||||||
|
elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
|
||||||
|
// put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
|
||||||
|
tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
|
||||||
|
for (k = 0; k < tmp.length; k++) {
|
||||||
|
elem.c('parameter', tmp[k]).up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb
|
||||||
|
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
|
||||||
|
elem.c('encryption', {required: 1});
|
||||||
|
var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
|
||||||
|
crypto.forEach(function(line) {
|
||||||
|
elem.c('crypto', SDPUtil.parse_crypto(line)).up();
|
||||||
|
});
|
||||||
|
elem.up(); // end of encryption
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ssrc) {
|
||||||
|
// new style mapping
|
||||||
|
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
// FIXME: group by ssrc and support multiple different ssrcs
|
||||||
|
var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
|
||||||
|
ssrclines.forEach(function(line) {
|
||||||
|
idx = line.indexOf(' ');
|
||||||
|
var linessrc = line.substr(0, idx).substr(7);
|
||||||
|
if (linessrc != ssrc) {
|
||||||
|
elem.up();
|
||||||
|
ssrc = linessrc;
|
||||||
|
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
}
|
||||||
|
var kv = line.substr(idx + 1);
|
||||||
|
elem.c('parameter');
|
||||||
|
if (kv.indexOf(':') == -1) {
|
||||||
|
elem.attrs({ name: kv });
|
||||||
|
} else {
|
||||||
|
elem.attrs({ name: kv.split(':', 2)[0] });
|
||||||
|
elem.attrs({ value: kv.split(':', 2)[1] });
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
});
|
||||||
|
elem.up();
|
||||||
|
|
||||||
|
// old proprietary mapping, to be removed at some point
|
||||||
|
tmp = SDPUtil.parse_ssrc(this.media[i]);
|
||||||
|
tmp.xmlns = 'http://estos.de/ns/ssrc';
|
||||||
|
tmp.ssrc = ssrc;
|
||||||
|
elem.c('ssrc', tmp).up(); // ssrc is part of description
|
||||||
|
|
||||||
|
// XEP-0339 handle ssrc-group attributes
|
||||||
|
var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
|
||||||
|
ssrc_group_lines.forEach(function(line) {
|
||||||
|
idx = line.indexOf(' ');
|
||||||
|
var semantics = line.substr(0, idx).substr(13);
|
||||||
|
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
ssrcs.forEach(function(ssrc) {
|
||||||
|
elem.c('source', { ssrc: ssrc })
|
||||||
|
.up();
|
||||||
|
});
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
|
||||||
|
elem.c('rtcp-mux').up();
|
||||||
|
}
|
||||||
|
|
||||||
|
// XEP-0293 -- map a=rtcp-fb:*
|
||||||
|
this.RtcpFbToJingle(i, elem, '*');
|
||||||
|
|
||||||
|
// XEP-0294
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
|
||||||
|
lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
|
||||||
|
for (j = 0; j < lines.length; j++) {
|
||||||
|
tmp = SDPUtil.parse_extmap(lines[j]);
|
||||||
|
elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
|
||||||
|
uri: tmp.uri,
|
||||||
|
id: tmp.value });
|
||||||
|
if (tmp.hasOwnProperty('direction')) {
|
||||||
|
switch (tmp.direction) {
|
||||||
|
case 'sendonly':
|
||||||
|
elem.attrs({senders: 'responder'});
|
||||||
|
break;
|
||||||
|
case 'recvonly':
|
||||||
|
elem.attrs({senders: 'initiator'});
|
||||||
|
break;
|
||||||
|
case 'sendrecv':
|
||||||
|
elem.attrs({senders: 'both'});
|
||||||
|
break;
|
||||||
|
case 'inactive':
|
||||||
|
elem.attrs({senders: 'none'});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// TODO: handle params
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elem.up(); // end of description
|
||||||
|
}
|
||||||
|
|
||||||
|
// map ice-ufrag/pwd, dtls fingerprint, candidates
|
||||||
|
this.TransportToJingle(i, elem);
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
|
||||||
|
elem.attrs({senders: 'both'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'initiator'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'responder'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
|
||||||
|
elem.attrs({senders: 'none'});
|
||||||
|
}
|
||||||
|
if (mline.port == '0') {
|
||||||
|
// estos hack to reject an m-line
|
||||||
|
elem.attrs({senders: 'rejected'});
|
||||||
|
}
|
||||||
|
elem.up(); // end of content
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
return elem;
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.TransportToJingle = function (mediaindex, elem) {
|
||||||
|
var i = mediaindex;
|
||||||
|
var tmp;
|
||||||
|
var self = this;
|
||||||
|
elem.c('transport');
|
||||||
|
|
||||||
|
// XEP-0343 DTLS/SCTP
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
|
||||||
|
{
|
||||||
|
var sctpmap = SDPUtil.find_line(
|
||||||
|
this.media[i], 'a=sctpmap:', self.session);
|
||||||
|
if (sctpmap)
|
||||||
|
{
|
||||||
|
var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
|
||||||
|
elem.c('sctpmap',
|
||||||
|
{
|
||||||
|
xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
|
||||||
|
number: sctpAttrs[0], /* SCTP port */
|
||||||
|
protocol: sctpAttrs[1], /* protocol */
|
||||||
|
});
|
||||||
|
// Optional stream count attribute
|
||||||
|
if (sctpAttrs.length > 2)
|
||||||
|
elem.attrs({ streams: sctpAttrs[2]});
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// XEP-0320
|
||||||
|
var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
|
||||||
|
fingerprints.forEach(function(line) {
|
||||||
|
tmp = SDPUtil.parse_fingerprint(line);
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
|
||||||
|
elem.c('fingerprint').t(tmp.fingerprint);
|
||||||
|
delete tmp.fingerprint;
|
||||||
|
line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
|
||||||
|
if (line) {
|
||||||
|
tmp.setup = line.substr(8);
|
||||||
|
}
|
||||||
|
elem.attrs(tmp);
|
||||||
|
elem.up(); // end of fingerprint
|
||||||
|
});
|
||||||
|
tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
|
||||||
|
if (tmp) {
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
|
||||||
|
elem.attrs(tmp);
|
||||||
|
// XEP-0176
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elem.up(); // end of transport
|
||||||
|
}
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
var tmp = SDPUtil.parse_rtcpfb(line);
|
||||||
|
if (tmp.type == 'trr-int') {
|
||||||
|
elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
|
||||||
|
elem.up();
|
||||||
|
} else {
|
||||||
|
elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
|
||||||
|
if (tmp.params.length > 0) {
|
||||||
|
elem.attrs({'subtype': tmp.params[0]});
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
|
||||||
|
var media = '';
|
||||||
|
var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
|
||||||
|
if (tmp.attr('value')) {
|
||||||
|
media += tmp.attr('value');
|
||||||
|
} else {
|
||||||
|
media += '0';
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
|
||||||
|
if ($(this).attr('subtype')) {
|
||||||
|
media += ' ' + $(this).attr('subtype');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
|
// construct an SDP from a jingle stanza
|
||||||
|
SDP.prototype.fromJingle = function (jingle) {
|
||||||
|
var self = this;
|
||||||
|
this.raw = 'v=0\r\n' +
|
||||||
|
'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' +// FIXME
|
||||||
|
's=-\r\n' +
|
||||||
|
't=0 0\r\n';
|
||||||
|
// http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
|
||||||
|
if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
|
||||||
|
$(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
|
||||||
|
// temporary namespace, not to be used. to be removed soon.
|
||||||
|
$(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (group.getAttribute('type') !== null && contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// for backward compability, to be removed soon
|
||||||
|
// assume all contents are in the same bundle group, can be improved upon later
|
||||||
|
var bundle = $(jingle).find('>content').filter(function (idx, content) {
|
||||||
|
//elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
|
||||||
|
return $(content).find('>bundle').length > 0;
|
||||||
|
}).map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (bundle.length) {
|
||||||
|
this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.session = this.raw;
|
||||||
|
jingle.find('>content').each(function () {
|
||||||
|
var m = self.jingle2media($(this));
|
||||||
|
self.media.push(m);
|
||||||
|
});
|
||||||
|
|
||||||
|
// reconstruct msid-semantic -- apparently not necessary
|
||||||
|
/*
|
||||||
|
var msid = SDPUtil.parse_ssrc(this.raw);
|
||||||
|
if (msid.hasOwnProperty('mslabel')) {
|
||||||
|
this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
};
|
||||||
|
|
||||||
|
// translate a jingle content element into an an SDP media part
|
||||||
|
SDP.prototype.jingle2media = function (content) {
|
||||||
|
var media = '',
|
||||||
|
desc = content.find('description'),
|
||||||
|
ssrc = desc.attr('ssrc'),
|
||||||
|
self = this,
|
||||||
|
tmp;
|
||||||
|
var sctp = content.find(
|
||||||
|
'>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');
|
||||||
|
|
||||||
|
tmp = { media: desc.attr('media') };
|
||||||
|
tmp.port = '1';
|
||||||
|
if (content.attr('senders') == 'rejected') {
|
||||||
|
// estos hack to reject an m-line.
|
||||||
|
tmp.port = '0';
|
||||||
|
}
|
||||||
|
if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
|
||||||
|
if (sctp.length)
|
||||||
|
tmp.proto = 'DTLS/SCTP';
|
||||||
|
else
|
||||||
|
tmp.proto = 'RTP/SAVPF';
|
||||||
|
} else {
|
||||||
|
tmp.proto = 'RTP/AVPF';
|
||||||
|
}
|
||||||
|
if (!sctp.length)
|
||||||
|
{
|
||||||
|
tmp.fmt = desc.find('payload-type').map(
|
||||||
|
function () { return this.getAttribute('id'); }).get();
|
||||||
|
media += SDPUtil.build_mline(tmp) + '\r\n';
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
|
||||||
|
media += 'a=sctpmap:' + sctp.attr('number') +
|
||||||
|
' ' + sctp.attr('protocol');
|
||||||
|
|
||||||
|
var streamCount = sctp.attr('streams');
|
||||||
|
if (streamCount)
|
||||||
|
media += ' ' + streamCount + '\r\n';
|
||||||
|
else
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
media += 'c=IN IP4 0.0.0.0\r\n';
|
||||||
|
if (!sctp.length)
|
||||||
|
media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
|
||||||
|
//tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
//console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
|
||||||
|
if (tmp.length) {
|
||||||
|
if (tmp.attr('ufrag')) {
|
||||||
|
media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
|
||||||
|
}
|
||||||
|
if (tmp.attr('pwd')) {
|
||||||
|
media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
|
||||||
|
}
|
||||||
|
tmp.find('>fingerprint').each(function () {
|
||||||
|
// FIXME: check namespace at some point
|
||||||
|
media += 'a=fingerprint:' + this.getAttribute('hash');
|
||||||
|
media += ' ' + $(this).text();
|
||||||
|
media += '\r\n';
|
||||||
|
//console.log("mline "+media);
|
||||||
|
if (this.getAttribute('setup')) {
|
||||||
|
media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
switch (content.attr('senders')) {
|
||||||
|
case 'initiator':
|
||||||
|
media += 'a=sendonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'responder':
|
||||||
|
media += 'a=recvonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'none':
|
||||||
|
media += 'a=inactive\r\n';
|
||||||
|
break;
|
||||||
|
case 'both':
|
||||||
|
media += 'a=sendrecv\r\n';
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
media += 'a=mid:' + content.attr('name') + '\r\n';
|
||||||
|
/*if (content.attr('name') == 'video') {
|
||||||
|
media += 'a=x-google-flag:conference' + '\r\n';
|
||||||
|
}*/
|
||||||
|
|
||||||
|
// <description><rtcp-mux/></description>
|
||||||
|
// see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
|
||||||
|
// and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
|
||||||
|
if (desc.find('rtcp-mux').length) {
|
||||||
|
media += 'a=rtcp-mux\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (desc.find('encryption').length) {
|
||||||
|
desc.find('encryption>crypto').each(function () {
|
||||||
|
media += 'a=crypto:' + this.getAttribute('tag');
|
||||||
|
media += ' ' + this.getAttribute('crypto-suite');
|
||||||
|
media += ' ' + this.getAttribute('key-params');
|
||||||
|
if (this.getAttribute('session-params')) {
|
||||||
|
media += ' ' + this.getAttribute('session-params');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
}
|
||||||
|
desc.find('payload-type').each(function () {
|
||||||
|
media += SDPUtil.build_rtpmap(this) + '\r\n';
|
||||||
|
if ($(this).find('>parameter').length) {
|
||||||
|
media += 'a=fmtp:' + this.getAttribute('id') + ' ';
|
||||||
|
media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
|
||||||
|
});
|
||||||
|
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle(desc, '*');
|
||||||
|
|
||||||
|
// xep-0294
|
||||||
|
tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
|
||||||
|
});
|
||||||
|
|
||||||
|
content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
|
||||||
|
media += SDPUtil.candidateFromJingle(this);
|
||||||
|
});
|
||||||
|
|
||||||
|
// XEP-0339 handle ssrc-group attributes
|
||||||
|
tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
|
||||||
|
var semantics = this.getAttribute('semantics');
|
||||||
|
var ssrcs = $(this).find('>source').map(function() {
|
||||||
|
return this.getAttribute('ssrc');
|
||||||
|
}).get();
|
||||||
|
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
var ssrc = this.getAttribute('ssrc');
|
||||||
|
$(this).find('>parameter').each(function () {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
|
||||||
|
if (this.getAttribute('value') && this.getAttribute('value').length)
|
||||||
|
media += ':' + this.getAttribute('value');
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
if (tmp.length === 0) {
|
||||||
|
// fallback to proprietary mapping of a=ssrc lines
|
||||||
|
tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
408
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js
Normal file
408
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js
Normal file
@@ -0,0 +1,408 @@
|
|||||||
|
/**
|
||||||
|
* Contains utility classes used in SDP class.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class holds a=ssrc lines and media type a=mid
|
||||||
|
* @param ssrc synchronization source identifier number(a=ssrc lines from SDP)
|
||||||
|
* @param type media type eg. "audio" or "video"(a=mid frm SDP)
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function ChannelSsrc(ssrc, type) {
|
||||||
|
this.ssrc = ssrc;
|
||||||
|
this.type = type;
|
||||||
|
this.lines = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class holds a=ssrc-group: lines
|
||||||
|
* @param semantics
|
||||||
|
* @param ssrcs
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function ChannelSsrcGroup(semantics, ssrcs, line) {
|
||||||
|
this.semantics = semantics;
|
||||||
|
this.ssrcs = ssrcs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class represents media channel. Is a container for ChannelSsrc, holds channel idx and media type.
|
||||||
|
* @param channelNumber channel idx in SDP media array.
|
||||||
|
* @param mediaType media type(a=mid)
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function MediaChannel(channelNumber, mediaType) {
|
||||||
|
/**
|
||||||
|
* SDP channel number
|
||||||
|
* @type {*}
|
||||||
|
*/
|
||||||
|
this.chNumber = channelNumber;
|
||||||
|
/**
|
||||||
|
* Channel media type(a=mid)
|
||||||
|
* @type {*}
|
||||||
|
*/
|
||||||
|
this.mediaType = mediaType;
|
||||||
|
/**
|
||||||
|
* The maps of ssrc numbers to ChannelSsrc objects.
|
||||||
|
*/
|
||||||
|
this.ssrcs = {};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The array of ChannelSsrcGroup objects.
|
||||||
|
* @type {Array}
|
||||||
|
*/
|
||||||
|
this.ssrcGroups = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
SDPUtil = {
|
||||||
|
iceparams: function (mediadesc, sessiondesc) {
|
||||||
|
var data = null;
|
||||||
|
if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
|
||||||
|
SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
|
||||||
|
data = {
|
||||||
|
ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
|
||||||
|
pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_iceufrag: function (line) {
|
||||||
|
return line.substring(12);
|
||||||
|
},
|
||||||
|
build_iceufrag: function (frag) {
|
||||||
|
return 'a=ice-ufrag:' + frag;
|
||||||
|
},
|
||||||
|
parse_icepwd: function (line) {
|
||||||
|
return line.substring(10);
|
||||||
|
},
|
||||||
|
build_icepwd: function (pwd) {
|
||||||
|
return 'a=ice-pwd:' + pwd;
|
||||||
|
},
|
||||||
|
parse_mid: function (line) {
|
||||||
|
return line.substring(6);
|
||||||
|
},
|
||||||
|
parse_mline: function (line) {
|
||||||
|
var parts = line.substring(2).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.media = parts.shift();
|
||||||
|
data.port = parts.shift();
|
||||||
|
data.proto = parts.shift();
|
||||||
|
if (parts[parts.length - 1] === '') { // trailing whitespace
|
||||||
|
parts.pop();
|
||||||
|
}
|
||||||
|
data.fmt = parts;
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
build_mline: function (mline) {
|
||||||
|
return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
|
||||||
|
},
|
||||||
|
parse_rtpmap: function (line) {
|
||||||
|
var parts = line.substring(9).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.id = parts.shift();
|
||||||
|
parts = parts[0].split('/');
|
||||||
|
data.name = parts.shift();
|
||||||
|
data.clockrate = parts.shift();
|
||||||
|
data.channels = parts.length ? parts.shift() : '1';
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
/**
|
||||||
|
* Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
|
||||||
|
* @param line eg. "a=sctpmap:5000 webrtc-datachannel"
|
||||||
|
* @returns [SCTP port number, protocol, streams]
|
||||||
|
*/
|
||||||
|
parse_sctpmap: function (line)
|
||||||
|
{
|
||||||
|
var parts = line.substring(10).split(' ');
|
||||||
|
var sctpPort = parts[0];
|
||||||
|
var protocol = parts[1];
|
||||||
|
// Stream count is optional
|
||||||
|
var streamCount = parts.length > 2 ? parts[2] : null;
|
||||||
|
return [sctpPort, protocol, streamCount];// SCTP port
|
||||||
|
},
|
||||||
|
build_rtpmap: function (el) {
|
||||||
|
var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
|
||||||
|
if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
|
||||||
|
line += '/' + el.getAttribute('channels');
|
||||||
|
}
|
||||||
|
return line;
|
||||||
|
},
|
||||||
|
parse_crypto: function (line) {
|
||||||
|
var parts = line.substring(9).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.tag = parts.shift();
|
||||||
|
data['crypto-suite'] = parts.shift();
|
||||||
|
data['key-params'] = parts.shift();
|
||||||
|
if (parts.length) {
|
||||||
|
data['session-params'] = parts.join(' ');
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_fingerprint: function (line) { // RFC 4572
|
||||||
|
var parts = line.substring(14).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.hash = parts.shift();
|
||||||
|
data.fingerprint = parts.shift();
|
||||||
|
// TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_fmtp: function (line) {
|
||||||
|
var parts = line.split(' '),
|
||||||
|
i, key, value,
|
||||||
|
data = [];
|
||||||
|
parts.shift();
|
||||||
|
parts = parts.join(' ').split(';');
|
||||||
|
for (i = 0; i < parts.length; i++) {
|
||||||
|
key = parts[i].split('=')[0];
|
||||||
|
while (key.length && key[0] == ' ') {
|
||||||
|
key = key.substring(1);
|
||||||
|
}
|
||||||
|
value = parts[i].split('=')[1];
|
||||||
|
if (key && value) {
|
||||||
|
data.push({name: key, value: value});
|
||||||
|
} else if (key) {
|
||||||
|
// rfc 4733 (DTMF) style stuff
|
||||||
|
data.push({name: '', value: key});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_icecandidate: function (line) {
|
||||||
|
var candidate = {},
|
||||||
|
elems = line.split(' ');
|
||||||
|
candidate.foundation = elems[0].substring(12);
|
||||||
|
candidate.component = elems[1];
|
||||||
|
candidate.protocol = elems[2].toLowerCase();
|
||||||
|
candidate.priority = elems[3];
|
||||||
|
candidate.ip = elems[4];
|
||||||
|
candidate.port = elems[5];
|
||||||
|
// elems[6] => "typ"
|
||||||
|
candidate.type = elems[7];
|
||||||
|
candidate.generation = 0; // default value, may be overwritten below
|
||||||
|
for (var i = 8; i < elems.length; i += 2) {
|
||||||
|
switch (elems[i]) {
|
||||||
|
case 'raddr':
|
||||||
|
candidate['rel-addr'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'rport':
|
||||||
|
candidate['rel-port'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'generation':
|
||||||
|
candidate.generation = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'tcptype':
|
||||||
|
candidate.tcptype = elems[i + 1];
|
||||||
|
break;
|
||||||
|
default: // TODO
|
||||||
|
console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
candidate.network = '1';
|
||||||
|
candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
|
||||||
|
return candidate;
|
||||||
|
},
|
||||||
|
build_icecandidate: function (cand) {
|
||||||
|
var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
|
||||||
|
line += ' ';
|
||||||
|
switch (cand.type) {
|
||||||
|
case 'srflx':
|
||||||
|
case 'prflx':
|
||||||
|
case 'relay':
|
||||||
|
if (cand.hasOwnAttribute('rel-addr') && cand.hasOwnAttribute('rel-port')) {
|
||||||
|
line += 'raddr';
|
||||||
|
line += ' ';
|
||||||
|
line += cand['rel-addr'];
|
||||||
|
line += ' ';
|
||||||
|
line += 'rport';
|
||||||
|
line += ' ';
|
||||||
|
line += cand['rel-port'];
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (cand.hasOwnAttribute('tcptype')) {
|
||||||
|
line += 'tcptype';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.tcptype;
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
line += 'generation';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.hasOwnAttribute('generation') ? cand.generation : '0';
|
||||||
|
return line;
|
||||||
|
},
|
||||||
|
parse_ssrc: function (desc) {
|
||||||
|
// proprietary mapping of a=ssrc lines
|
||||||
|
// TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher on google docs
|
||||||
|
// and parse according to that
|
||||||
|
var lines = desc.split('\r\n'),
|
||||||
|
data = {};
|
||||||
|
for (var i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].substring(0, 7) == 'a=ssrc:') {
|
||||||
|
var idx = lines[i].indexOf(' ');
|
||||||
|
data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_rtcpfb: function (line) {
|
||||||
|
var parts = line.substr(10).split(' ');
|
||||||
|
var data = {};
|
||||||
|
data.pt = parts.shift();
|
||||||
|
data.type = parts.shift();
|
||||||
|
data.params = parts;
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_extmap: function (line) {
|
||||||
|
var parts = line.substr(9).split(' ');
|
||||||
|
var data = {};
|
||||||
|
data.value = parts.shift();
|
||||||
|
if (data.value.indexOf('/') != -1) {
|
||||||
|
data.direction = data.value.substr(data.value.indexOf('/') + 1);
|
||||||
|
data.value = data.value.substr(0, data.value.indexOf('/'));
|
||||||
|
} else {
|
||||||
|
data.direction = 'both';
|
||||||
|
}
|
||||||
|
data.uri = parts.shift();
|
||||||
|
data.params = parts;
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
find_line: function (haystack, needle, sessionpart) {
|
||||||
|
var lines = haystack.split('\r\n');
|
||||||
|
for (var i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].substring(0, needle.length) == needle) {
|
||||||
|
return lines[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!sessionpart) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// search session part
|
||||||
|
lines = sessionpart.split('\r\n');
|
||||||
|
for (var j = 0; j < lines.length; j++) {
|
||||||
|
if (lines[j].substring(0, needle.length) == needle) {
|
||||||
|
return lines[j];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
find_lines: function (haystack, needle, sessionpart) {
|
||||||
|
var lines = haystack.split('\r\n'),
|
||||||
|
needles = [];
|
||||||
|
for (var i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].substring(0, needle.length) == needle)
|
||||||
|
needles.push(lines[i]);
|
||||||
|
}
|
||||||
|
if (needles.length || !sessionpart) {
|
||||||
|
return needles;
|
||||||
|
}
|
||||||
|
// search session part
|
||||||
|
lines = sessionpart.split('\r\n');
|
||||||
|
for (var j = 0; j < lines.length; j++) {
|
||||||
|
if (lines[j].substring(0, needle.length) == needle) {
|
||||||
|
needles.push(lines[j]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return needles;
|
||||||
|
},
|
||||||
|
candidateToJingle: function (line) {
|
||||||
|
// a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
|
||||||
|
// <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
|
||||||
|
if (line.indexOf('candidate:') === 0) {
|
||||||
|
line = 'a=' + line;
|
||||||
|
} else if (line.substring(0, 12) != 'a=candidate:') {
|
||||||
|
console.log('parseCandidate called with a line that is not a candidate line');
|
||||||
|
console.log(line);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (line.substring(line.length - 2) == '\r\n') // chomp it
|
||||||
|
line = line.substring(0, line.length - 2);
|
||||||
|
var candidate = {},
|
||||||
|
elems = line.split(' '),
|
||||||
|
i;
|
||||||
|
if (elems[6] != 'typ') {
|
||||||
|
console.log('did not find typ in the right place');
|
||||||
|
console.log(line);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
candidate.foundation = elems[0].substring(12);
|
||||||
|
candidate.component = elems[1];
|
||||||
|
candidate.protocol = elems[2].toLowerCase();
|
||||||
|
candidate.priority = elems[3];
|
||||||
|
candidate.ip = elems[4];
|
||||||
|
candidate.port = elems[5];
|
||||||
|
// elems[6] => "typ"
|
||||||
|
candidate.type = elems[7];
|
||||||
|
|
||||||
|
candidate.generation = '0'; // default, may be overwritten below
|
||||||
|
for (i = 8; i < elems.length; i += 2) {
|
||||||
|
switch (elems[i]) {
|
||||||
|
case 'raddr':
|
||||||
|
candidate['rel-addr'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'rport':
|
||||||
|
candidate['rel-port'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'generation':
|
||||||
|
candidate.generation = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'tcptype':
|
||||||
|
candidate.tcptype = elems[i + 1];
|
||||||
|
break;
|
||||||
|
default: // TODO
|
||||||
|
console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
candidate.network = '1';
|
||||||
|
candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
|
||||||
|
return candidate;
|
||||||
|
},
|
||||||
|
candidateFromJingle: function (cand) {
|
||||||
|
var line = 'a=candidate:';
|
||||||
|
line += cand.getAttribute('foundation');
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('component');
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('protocol'); //.toUpperCase(); // chrome M23 doesn't like this
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('priority');
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('ip');
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('port');
|
||||||
|
line += ' ';
|
||||||
|
line += 'typ';
|
||||||
|
line += ' ' + cand.getAttribute('type');
|
||||||
|
line += ' ';
|
||||||
|
switch (cand.getAttribute('type')) {
|
||||||
|
case 'srflx':
|
||||||
|
case 'prflx':
|
||||||
|
case 'relay':
|
||||||
|
if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
|
||||||
|
line += 'raddr';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('rel-addr');
|
||||||
|
line += ' ';
|
||||||
|
line += 'rport';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('rel-port');
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (cand.getAttribute('protocol').toLowerCase() == 'tcp') {
|
||||||
|
line += 'tcptype';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('tcptype');
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
line += 'generation';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.getAttribute('generation') || '0';
|
||||||
|
return line + '\r\n';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.SDPUtil = SDPUtil;
|
||||||
|
|
||||||
254
contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js
Normal file
254
contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
/**
|
||||||
|
* Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
|
||||||
|
*
|
||||||
|
* This can be used with JS designed for browsers to improve reuse of code and
|
||||||
|
* allow the use of existing libraries.
|
||||||
|
*
|
||||||
|
* Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
|
||||||
|
*
|
||||||
|
* @todo SSL Support
|
||||||
|
* @author Dan DeFelippi <dan@driverdan.com>
|
||||||
|
* @license MIT
|
||||||
|
*/
|
||||||
|
|
||||||
|
var Url = require("url")
|
||||||
|
,sys = require("util");
|
||||||
|
|
||||||
|
exports.XMLHttpRequest = function() {
|
||||||
|
/**
|
||||||
|
* Private variables
|
||||||
|
*/
|
||||||
|
var self = this;
|
||||||
|
var http = require('http');
|
||||||
|
var https = require('https');
|
||||||
|
|
||||||
|
// Holds http.js objects
|
||||||
|
var client;
|
||||||
|
var request;
|
||||||
|
var response;
|
||||||
|
|
||||||
|
// Request settings
|
||||||
|
var settings = {};
|
||||||
|
|
||||||
|
// Set some default headers
|
||||||
|
var defaultHeaders = {
|
||||||
|
"User-Agent": "node.js",
|
||||||
|
"Accept": "*/*",
|
||||||
|
};
|
||||||
|
|
||||||
|
var headers = defaultHeaders;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constants
|
||||||
|
*/
|
||||||
|
this.UNSENT = 0;
|
||||||
|
this.OPENED = 1;
|
||||||
|
this.HEADERS_RECEIVED = 2;
|
||||||
|
this.LOADING = 3;
|
||||||
|
this.DONE = 4;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Public vars
|
||||||
|
*/
|
||||||
|
// Current state
|
||||||
|
this.readyState = this.UNSENT;
|
||||||
|
|
||||||
|
// default ready state change handler in case one is not set or is set late
|
||||||
|
this.onreadystatechange = function() {};
|
||||||
|
|
||||||
|
// Result & response
|
||||||
|
this.responseText = "";
|
||||||
|
this.responseXML = "";
|
||||||
|
this.status = null;
|
||||||
|
this.statusText = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Open the connection. Currently supports local server requests.
|
||||||
|
*
|
||||||
|
* @param string method Connection method (eg GET, POST)
|
||||||
|
* @param string url URL for the connection.
|
||||||
|
* @param boolean async Asynchronous connection. Default is true.
|
||||||
|
* @param string user Username for basic authentication (optional)
|
||||||
|
* @param string password Password for basic authentication (optional)
|
||||||
|
*/
|
||||||
|
this.open = function(method, url, async, user, password) {
|
||||||
|
settings = {
|
||||||
|
"method": method,
|
||||||
|
"url": url,
|
||||||
|
"async": async || null,
|
||||||
|
"user": user || null,
|
||||||
|
"password": password || null
|
||||||
|
};
|
||||||
|
|
||||||
|
this.abort();
|
||||||
|
|
||||||
|
setState(this.OPENED);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets a header for the request.
|
||||||
|
*
|
||||||
|
* @param string header Header name
|
||||||
|
* @param string value Header value
|
||||||
|
*/
|
||||||
|
this.setRequestHeader = function(header, value) {
|
||||||
|
headers[header] = value;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets a header from the server response.
|
||||||
|
*
|
||||||
|
* @param string header Name of header to get.
|
||||||
|
* @return string Text of the header or null if it doesn't exist.
|
||||||
|
*/
|
||||||
|
this.getResponseHeader = function(header) {
|
||||||
|
if (this.readyState > this.OPENED && response.headers[header]) {
|
||||||
|
return header + ": " + response.headers[header];
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets all the response headers.
|
||||||
|
*
|
||||||
|
* @return string
|
||||||
|
*/
|
||||||
|
this.getAllResponseHeaders = function() {
|
||||||
|
if (this.readyState < this.HEADERS_RECEIVED) {
|
||||||
|
throw "INVALID_STATE_ERR: Headers have not been received.";
|
||||||
|
}
|
||||||
|
var result = "";
|
||||||
|
|
||||||
|
for (var i in response.headers) {
|
||||||
|
result += i + ": " + response.headers[i] + "\r\n";
|
||||||
|
}
|
||||||
|
return result.substr(0, result.length - 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sends the request to the server.
|
||||||
|
*
|
||||||
|
* @param string data Optional data to send as request body.
|
||||||
|
*/
|
||||||
|
this.send = function(data) {
|
||||||
|
if (this.readyState != this.OPENED) {
|
||||||
|
throw "INVALID_STATE_ERR: connection must be opened before send() is called";
|
||||||
|
}
|
||||||
|
|
||||||
|
var ssl = false;
|
||||||
|
var url = Url.parse(settings.url);
|
||||||
|
|
||||||
|
// Determine the server
|
||||||
|
switch (url.protocol) {
|
||||||
|
case 'https:':
|
||||||
|
ssl = true;
|
||||||
|
// SSL & non-SSL both need host, no break here.
|
||||||
|
case 'http:':
|
||||||
|
var host = url.hostname;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case undefined:
|
||||||
|
case '':
|
||||||
|
var host = "localhost";
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw "Protocol not supported.";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to port 80. If accessing localhost on another port be sure
|
||||||
|
// to use http://localhost:port/path
|
||||||
|
var port = url.port || (ssl ? 443 : 80);
|
||||||
|
// Add query string if one is used
|
||||||
|
var uri = url.pathname + (url.search ? url.search : '');
|
||||||
|
|
||||||
|
// Set the Host header or the server may reject the request
|
||||||
|
this.setRequestHeader("Host", host);
|
||||||
|
|
||||||
|
// Set content length header
|
||||||
|
if (settings.method == "GET" || settings.method == "HEAD") {
|
||||||
|
data = null;
|
||||||
|
} else if (data) {
|
||||||
|
this.setRequestHeader("Content-Length", Buffer.byteLength(data));
|
||||||
|
|
||||||
|
if (!headers["Content-Type"]) {
|
||||||
|
this.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the proper protocol
|
||||||
|
var doRequest = ssl ? https.request : http.request;
|
||||||
|
|
||||||
|
var options = {
|
||||||
|
host: host,
|
||||||
|
port: port,
|
||||||
|
path: uri,
|
||||||
|
method: settings.method,
|
||||||
|
headers: headers,
|
||||||
|
agent: false
|
||||||
|
};
|
||||||
|
|
||||||
|
var req = doRequest(options, function(res) {
|
||||||
|
response = res;
|
||||||
|
response.setEncoding("utf8");
|
||||||
|
|
||||||
|
setState(self.HEADERS_RECEIVED);
|
||||||
|
self.status = response.statusCode;
|
||||||
|
|
||||||
|
response.on('data', function(chunk) {
|
||||||
|
// Make sure there's some data
|
||||||
|
if (chunk) {
|
||||||
|
self.responseText += chunk;
|
||||||
|
}
|
||||||
|
setState(self.LOADING);
|
||||||
|
});
|
||||||
|
|
||||||
|
response.on('end', function() {
|
||||||
|
setState(self.DONE);
|
||||||
|
});
|
||||||
|
|
||||||
|
response.on('error', function() {
|
||||||
|
self.handleError(error);
|
||||||
|
});
|
||||||
|
}).on('error', function(error) {
|
||||||
|
self.handleError(error);
|
||||||
|
});
|
||||||
|
|
||||||
|
req.setHeader("Connection", "Close");
|
||||||
|
|
||||||
|
// Node 0.4 and later won't accept empty data. Make sure it's needed.
|
||||||
|
if (data) {
|
||||||
|
req.write(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
req.end();
|
||||||
|
};
|
||||||
|
|
||||||
|
this.handleError = function(error) {
|
||||||
|
this.status = 503;
|
||||||
|
this.statusText = error;
|
||||||
|
this.responseText = error.stack;
|
||||||
|
setState(this.DONE);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aborts a request.
|
||||||
|
*/
|
||||||
|
this.abort = function() {
|
||||||
|
headers = defaultHeaders;
|
||||||
|
this.readyState = this.UNSENT;
|
||||||
|
this.responseText = "";
|
||||||
|
this.responseXML = "";
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Changes readyState and calls onreadystatechange.
|
||||||
|
*
|
||||||
|
* @param int state New state
|
||||||
|
*/
|
||||||
|
var setState = function(state) {
|
||||||
|
self.readyState = state;
|
||||||
|
self.onreadystatechange();
|
||||||
|
}
|
||||||
|
};
|
||||||
83
contrib/jitsimeetbridge/unjingle/strophe/base64.js
Normal file
83
contrib/jitsimeetbridge/unjingle/strophe/base64.js
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
// This code was written by Tyler Akins and has been placed in the
|
||||||
|
// public domain. It would be nice if you left this header intact.
|
||||||
|
// Base64 code from Tyler Akins -- http://rumkin.com
|
||||||
|
|
||||||
|
var Base64 = (function () {
|
||||||
|
var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
|
||||||
|
|
||||||
|
var obj = {
|
||||||
|
/**
|
||||||
|
* Encodes a string in base64
|
||||||
|
* @param {String} input The string to encode in base64.
|
||||||
|
*/
|
||||||
|
encode: function (input) {
|
||||||
|
var output = "";
|
||||||
|
var chr1, chr2, chr3;
|
||||||
|
var enc1, enc2, enc3, enc4;
|
||||||
|
var i = 0;
|
||||||
|
|
||||||
|
do {
|
||||||
|
chr1 = input.charCodeAt(i++);
|
||||||
|
chr2 = input.charCodeAt(i++);
|
||||||
|
chr3 = input.charCodeAt(i++);
|
||||||
|
|
||||||
|
enc1 = chr1 >> 2;
|
||||||
|
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
|
||||||
|
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
|
||||||
|
enc4 = chr3 & 63;
|
||||||
|
|
||||||
|
if (isNaN(chr2)) {
|
||||||
|
enc3 = enc4 = 64;
|
||||||
|
} else if (isNaN(chr3)) {
|
||||||
|
enc4 = 64;
|
||||||
|
}
|
||||||
|
|
||||||
|
output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) +
|
||||||
|
keyStr.charAt(enc3) + keyStr.charAt(enc4);
|
||||||
|
} while (i < input.length);
|
||||||
|
|
||||||
|
return output;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decodes a base64 string.
|
||||||
|
* @param {String} input The string to decode.
|
||||||
|
*/
|
||||||
|
decode: function (input) {
|
||||||
|
var output = "";
|
||||||
|
var chr1, chr2, chr3;
|
||||||
|
var enc1, enc2, enc3, enc4;
|
||||||
|
var i = 0;
|
||||||
|
|
||||||
|
// remove all characters that are not A-Z, a-z, 0-9, +, /, or =
|
||||||
|
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');
|
||||||
|
|
||||||
|
do {
|
||||||
|
enc1 = keyStr.indexOf(input.charAt(i++));
|
||||||
|
enc2 = keyStr.indexOf(input.charAt(i++));
|
||||||
|
enc3 = keyStr.indexOf(input.charAt(i++));
|
||||||
|
enc4 = keyStr.indexOf(input.charAt(i++));
|
||||||
|
|
||||||
|
chr1 = (enc1 << 2) | (enc2 >> 4);
|
||||||
|
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
|
||||||
|
chr3 = ((enc3 & 3) << 6) | enc4;
|
||||||
|
|
||||||
|
output = output + String.fromCharCode(chr1);
|
||||||
|
|
||||||
|
if (enc3 != 64) {
|
||||||
|
output = output + String.fromCharCode(chr2);
|
||||||
|
}
|
||||||
|
if (enc4 != 64) {
|
||||||
|
output = output + String.fromCharCode(chr3);
|
||||||
|
}
|
||||||
|
} while (i < input.length);
|
||||||
|
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return obj;
|
||||||
|
})();
|
||||||
|
|
||||||
|
// Nodify
|
||||||
|
exports.Base64 = Base64;
|
||||||
279
contrib/jitsimeetbridge/unjingle/strophe/md5.js
Normal file
279
contrib/jitsimeetbridge/unjingle/strophe/md5.js
Normal file
@@ -0,0 +1,279 @@
|
|||||||
|
/*
|
||||||
|
* A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
|
||||||
|
* Digest Algorithm, as defined in RFC 1321.
|
||||||
|
* Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
|
||||||
|
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
|
||||||
|
* Distributed under the BSD License
|
||||||
|
* See http://pajhome.org.uk/crypt/md5 for more info.
|
||||||
|
*/
|
||||||
|
|
||||||
|
var MD5 = (function () {
|
||||||
|
/*
|
||||||
|
* Configurable variables. You may need to tweak these to be compatible with
|
||||||
|
* the server-side, but the defaults work in most cases.
|
||||||
|
*/
|
||||||
|
var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
|
||||||
|
var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
|
||||||
|
var chrsz = 8; /* bits per input character. 8 - ASCII; 16 - Unicode */
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Add integers, wrapping at 2^32. This uses 16-bit operations internally
|
||||||
|
* to work around bugs in some JS interpreters.
|
||||||
|
*/
|
||||||
|
var safe_add = function (x, y) {
|
||||||
|
var lsw = (x & 0xFFFF) + (y & 0xFFFF);
|
||||||
|
var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
|
||||||
|
return (msw << 16) | (lsw & 0xFFFF);
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Bitwise rotate a 32-bit number to the left.
|
||||||
|
*/
|
||||||
|
var bit_rol = function (num, cnt) {
|
||||||
|
return (num << cnt) | (num >>> (32 - cnt));
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convert a string to an array of little-endian words
|
||||||
|
* If chrsz is ASCII, characters >255 have their hi-byte silently ignored.
|
||||||
|
*/
|
||||||
|
var str2binl = function (str) {
|
||||||
|
var bin = [];
|
||||||
|
var mask = (1 << chrsz) - 1;
|
||||||
|
for(var i = 0; i < str.length * chrsz; i += chrsz)
|
||||||
|
{
|
||||||
|
bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
|
||||||
|
}
|
||||||
|
return bin;
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convert an array of little-endian words to a string
|
||||||
|
*/
|
||||||
|
var binl2str = function (bin) {
|
||||||
|
var str = "";
|
||||||
|
var mask = (1 << chrsz) - 1;
|
||||||
|
for(var i = 0; i < bin.length * 32; i += chrsz)
|
||||||
|
{
|
||||||
|
str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
|
||||||
|
}
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convert an array of little-endian words to a hex string.
|
||||||
|
*/
|
||||||
|
var binl2hex = function (binarray) {
|
||||||
|
var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
|
||||||
|
var str = "";
|
||||||
|
for(var i = 0; i < binarray.length * 4; i++)
|
||||||
|
{
|
||||||
|
str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
|
||||||
|
hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
|
||||||
|
}
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convert an array of little-endian words to a base-64 string
|
||||||
|
*/
|
||||||
|
var binl2b64 = function (binarray) {
|
||||||
|
var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
||||||
|
var str = "";
|
||||||
|
var triplet, j;
|
||||||
|
for(var i = 0; i < binarray.length * 4; i += 3)
|
||||||
|
{
|
||||||
|
triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16) |
|
||||||
|
(((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 ) |
|
||||||
|
((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
|
||||||
|
for(j = 0; j < 4; j++)
|
||||||
|
{
|
||||||
|
if(i * 8 + j * 6 > binarray.length * 32) { str += b64pad; }
|
||||||
|
else { str += tab.charAt((triplet >> 6*(3-j)) & 0x3F); }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* These functions implement the four basic operations the algorithm uses.
|
||||||
|
*/
|
||||||
|
var md5_cmn = function (q, a, b, x, s, t) {
|
||||||
|
return safe_add(bit_rol(safe_add(safe_add(a, q),safe_add(x, t)), s),b);
|
||||||
|
};
|
||||||
|
|
||||||
|
var md5_ff = function (a, b, c, d, x, s, t) {
|
||||||
|
return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
|
||||||
|
};
|
||||||
|
|
||||||
|
var md5_gg = function (a, b, c, d, x, s, t) {
|
||||||
|
return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
|
||||||
|
};
|
||||||
|
|
||||||
|
var md5_hh = function (a, b, c, d, x, s, t) {
|
||||||
|
return md5_cmn(b ^ c ^ d, a, b, x, s, t);
|
||||||
|
};
|
||||||
|
|
||||||
|
var md5_ii = function (a, b, c, d, x, s, t) {
|
||||||
|
return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Calculate the MD5 of an array of little-endian words, and a bit length
|
||||||
|
*/
|
||||||
|
var core_md5 = function (x, len) {
|
||||||
|
/* append padding */
|
||||||
|
x[len >> 5] |= 0x80 << ((len) % 32);
|
||||||
|
x[(((len + 64) >>> 9) << 4) + 14] = len;
|
||||||
|
|
||||||
|
var a = 1732584193;
|
||||||
|
var b = -271733879;
|
||||||
|
var c = -1732584194;
|
||||||
|
var d = 271733878;
|
||||||
|
|
||||||
|
var olda, oldb, oldc, oldd;
|
||||||
|
for (var i = 0; i < x.length; i += 16)
|
||||||
|
{
|
||||||
|
olda = a;
|
||||||
|
oldb = b;
|
||||||
|
oldc = c;
|
||||||
|
oldd = d;
|
||||||
|
|
||||||
|
a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
|
||||||
|
d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
|
||||||
|
c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
|
||||||
|
b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
|
||||||
|
a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
|
||||||
|
d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
|
||||||
|
c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
|
||||||
|
b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
|
||||||
|
a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
|
||||||
|
d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
|
||||||
|
c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
|
||||||
|
b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
|
||||||
|
a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
|
||||||
|
d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
|
||||||
|
c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
|
||||||
|
b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);
|
||||||
|
|
||||||
|
a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
|
||||||
|
d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
|
||||||
|
c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
|
||||||
|
b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
|
||||||
|
a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
|
||||||
|
d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
|
||||||
|
c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
|
||||||
|
b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
|
||||||
|
a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
|
||||||
|
d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
|
||||||
|
c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
|
||||||
|
b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
|
||||||
|
a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
|
||||||
|
d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
|
||||||
|
c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
|
||||||
|
b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);
|
||||||
|
|
||||||
|
a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
|
||||||
|
d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
|
||||||
|
c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
|
||||||
|
b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
|
||||||
|
a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
|
||||||
|
d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
|
||||||
|
c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
|
||||||
|
b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
|
||||||
|
a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
|
||||||
|
d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
|
||||||
|
c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
|
||||||
|
b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
|
||||||
|
a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
|
||||||
|
d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
|
||||||
|
c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
|
||||||
|
b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);
|
||||||
|
|
||||||
|
a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
|
||||||
|
d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
|
||||||
|
c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
|
||||||
|
b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
|
||||||
|
a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
|
||||||
|
d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
|
||||||
|
c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
|
||||||
|
b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
|
||||||
|
a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
|
||||||
|
d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
|
||||||
|
c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
|
||||||
|
b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
|
||||||
|
a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
|
||||||
|
d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
|
||||||
|
c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
|
||||||
|
b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);
|
||||||
|
|
||||||
|
a = safe_add(a, olda);
|
||||||
|
b = safe_add(b, oldb);
|
||||||
|
c = safe_add(c, oldc);
|
||||||
|
d = safe_add(d, oldd);
|
||||||
|
}
|
||||||
|
return [a, b, c, d];
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Calculate the HMAC-MD5, of a key and some data
|
||||||
|
*/
|
||||||
|
var core_hmac_md5 = function (key, data) {
|
||||||
|
var bkey = str2binl(key);
|
||||||
|
if(bkey.length > 16) { bkey = core_md5(bkey, key.length * chrsz); }
|
||||||
|
|
||||||
|
var ipad = new Array(16), opad = new Array(16);
|
||||||
|
for(var i = 0; i < 16; i++)
|
||||||
|
{
|
||||||
|
ipad[i] = bkey[i] ^ 0x36363636;
|
||||||
|
opad[i] = bkey[i] ^ 0x5C5C5C5C;
|
||||||
|
}
|
||||||
|
|
||||||
|
var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
|
||||||
|
return core_md5(opad.concat(hash), 512 + 128);
|
||||||
|
};
|
||||||
|
|
||||||
|
var obj = {
|
||||||
|
/*
|
||||||
|
* These are the functions you'll usually want to call.
|
||||||
|
* They take string arguments and return either hex or base-64 encoded
|
||||||
|
* strings.
|
||||||
|
*/
|
||||||
|
hexdigest: function (s) {
|
||||||
|
return binl2hex(core_md5(str2binl(s), s.length * chrsz));
|
||||||
|
},
|
||||||
|
|
||||||
|
b64digest: function (s) {
|
||||||
|
return binl2b64(core_md5(str2binl(s), s.length * chrsz));
|
||||||
|
},
|
||||||
|
|
||||||
|
hash: function (s) {
|
||||||
|
return binl2str(core_md5(str2binl(s), s.length * chrsz));
|
||||||
|
},
|
||||||
|
|
||||||
|
hmac_hexdigest: function (key, data) {
|
||||||
|
return binl2hex(core_hmac_md5(key, data));
|
||||||
|
},
|
||||||
|
|
||||||
|
hmac_b64digest: function (key, data) {
|
||||||
|
return binl2b64(core_hmac_md5(key, data));
|
||||||
|
},
|
||||||
|
|
||||||
|
hmac_hash: function (key, data) {
|
||||||
|
return binl2str(core_hmac_md5(key, data));
|
||||||
|
},
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Perform a simple self-test to see if the VM is working
|
||||||
|
*/
|
||||||
|
test: function () {
|
||||||
|
return MD5.hexdigest("abc") === "900150983cd24fb0d6963f7d28e17f72";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return obj;
|
||||||
|
})();
|
||||||
|
|
||||||
|
// Nodify
|
||||||
|
exports.MD5 = MD5;
|
||||||
3256
contrib/jitsimeetbridge/unjingle/strophe/strophe.js
Normal file
3256
contrib/jitsimeetbridge/unjingle/strophe/strophe.js
Normal file
File diff suppressed because it is too large
Load Diff
48
contrib/jitsimeetbridge/unjingle/unjingle.js
Normal file
48
contrib/jitsimeetbridge/unjingle/unjingle.js
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
var strophe = require("./strophe/strophe.js").Strophe;
|
||||||
|
|
||||||
|
var Strophe = strophe.Strophe;
|
||||||
|
var $iq = strophe.$iq;
|
||||||
|
var $msg = strophe.$msg;
|
||||||
|
var $build = strophe.$build;
|
||||||
|
var $pres = strophe.$pres;
|
||||||
|
|
||||||
|
var jsdom = require("jsdom");
|
||||||
|
var window = jsdom.jsdom().parentWindow;
|
||||||
|
var $ = require('jquery')(window);
|
||||||
|
|
||||||
|
var stropheJingle = require("./strophe.jingle.sdp.js");
|
||||||
|
|
||||||
|
|
||||||
|
var input = '';
|
||||||
|
|
||||||
|
process.stdin.on('readable', function() {
|
||||||
|
var chunk = process.stdin.read();
|
||||||
|
if (chunk !== null) {
|
||||||
|
input += chunk;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
process.stdin.on('end', function() {
|
||||||
|
if (process.argv[2] == '--jingle') {
|
||||||
|
var elem = $(input);
|
||||||
|
// app does:
|
||||||
|
// sess.setRemoteDescription($(iq).find('>jingle'), 'offer');
|
||||||
|
//console.log(elem.find('>content'));
|
||||||
|
var sdp = new stropheJingle.SDP('');
|
||||||
|
sdp.fromJingle(elem);
|
||||||
|
console.log(sdp.raw);
|
||||||
|
} else if (process.argv[2] == '--sdp') {
|
||||||
|
var sdp = new stropheJingle.SDP(input);
|
||||||
|
var accept = $iq({to: '%(tojid)s',
|
||||||
|
type: 'set'})
|
||||||
|
.c('jingle', {xmlns: 'urn:xmpp:jingle:1',
|
||||||
|
//action: 'session-accept',
|
||||||
|
action: '%(action)s',
|
||||||
|
initiator: '%(initiator)s',
|
||||||
|
responder: '%(responder)s',
|
||||||
|
sid: '%(sid)s' });
|
||||||
|
sdp.toJingle(accept, 'responder');
|
||||||
|
console.log(Strophe.serialize(accept));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
This directory contains some sample monitoring config for using the
|
|
||||||
'Prometheus' monitoring server against synapse.
|
|
||||||
|
|
||||||
To use it, first install prometheus by following the instructions at
|
|
||||||
|
|
||||||
http://prometheus.io/
|
|
||||||
|
|
||||||
### for Prometheus v1
|
|
||||||
|
|
||||||
Add a new job to the main prometheus.conf file:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
job: {
|
|
||||||
name: "synapse"
|
|
||||||
|
|
||||||
target_group: {
|
|
||||||
target: "http://SERVER.LOCATION.HERE:PORT/_synapse/metrics"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### for Prometheus v2
|
|
||||||
|
|
||||||
Add a new job to the main prometheus.yml file:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- job_name: "synapse"
|
|
||||||
metrics_path: "/_synapse/metrics"
|
|
||||||
# when endpoint uses https:
|
|
||||||
scheme: "https"
|
|
||||||
|
|
||||||
static_configs:
|
|
||||||
- targets: ["my.server.here:port"]
|
|
||||||
```
|
|
||||||
|
|
||||||
An example of a Prometheus configuration with workers can be found in
|
|
||||||
[metrics-howto.md](https://matrix-org.github.io/synapse/latest/metrics-howto.html).
|
|
||||||
|
|
||||||
To use `synapse.rules` add
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
rule_files:
|
|
||||||
- "/PATH/TO/synapse-v2.rules"
|
|
||||||
```
|
|
||||||
|
|
||||||
Metrics are disabled by default when running synapse; they must be enabled
|
|
||||||
with the 'enable-metrics' option, either in the synapse config file or as a
|
|
||||||
command-line option.
|
|
||||||
@@ -1,378 +0,0 @@
|
|||||||
{{ template "head" . }}
|
|
||||||
|
|
||||||
{{ template "prom_content_head" . }}
|
|
||||||
<h1>System Resources</h1>
|
|
||||||
|
|
||||||
<h3>CPU</h3>
|
|
||||||
<div id="process_resource_utime"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#process_resource_utime"),
|
|
||||||
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
max: 100,
|
|
||||||
renderer: "line",
|
|
||||||
height: 150,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "%",
|
|
||||||
yTitle: "CPU Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Memory</h3>
|
|
||||||
<div id="process_resident_memory_bytes"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#process_resident_memory_bytes"),
|
|
||||||
expr: "process_resident_memory_bytes",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
renderer: "line",
|
|
||||||
height: 150,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "bytes",
|
|
||||||
yTitle: "Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>File descriptors</h3>
|
|
||||||
<div id="process_fds"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#process_fds"),
|
|
||||||
expr: "process_open_fds",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
renderer: "line",
|
|
||||||
height: 150,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "",
|
|
||||||
yTitle: "Descriptors"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h1>Reactor</h1>
|
|
||||||
|
|
||||||
<h3>Total reactor time</h3>
|
|
||||||
<div id="reactor_total_time"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#reactor_total_time"),
|
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
max: 1,
|
|
||||||
min: 0,
|
|
||||||
renderer: "area",
|
|
||||||
height: 150,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/s",
|
|
||||||
yTitle: "Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Average reactor tick time</h3>
|
|
||||||
<div id="reactor_average_time"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#reactor_average_time"),
|
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
renderer: "line",
|
|
||||||
height: 150,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s",
|
|
||||||
yTitle: "Time"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h1>Storage</h1>
|
|
||||||
|
|
||||||
<h3>Queries</h3>
|
|
||||||
<div id="synapse_storage_query_time"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_storage_query_time"),
|
|
||||||
expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
|
|
||||||
name: "[[verb]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "queries/s",
|
|
||||||
yTitle: "Queries"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Transactions</h3>
|
|
||||||
<div id="synapse_storage_transaction_time"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_storage_transaction_time"),
|
|
||||||
expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
|
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
|
||||||
min: 0,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "txn/s",
|
|
||||||
yTitle: "Transactions"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Transaction execution time</h3>
|
|
||||||
<div id="synapse_storage_transactions_time_sec"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_storage_transactions_time_sec"),
|
|
||||||
expr: "rate(synapse_storage_transaction_time_sum[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
|
||||||
min: 0,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/s",
|
|
||||||
yTitle: "Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Average time waiting for database connection</h3>
|
|
||||||
<div id="synapse_storage_avg_waiting_time"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_storage_avg_waiting_time"),
|
|
||||||
expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s",
|
|
||||||
yTitle: "Time"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Cache request rate</h3>
|
|
||||||
<div id="synapse_cache_request_rate"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_cache_request_rate"),
|
|
||||||
expr: "rate(synapse_util_caches_cache:total[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
|
||||||
min: 0,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "rps",
|
|
||||||
yTitle: "Cache request rate"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Cache size</h3>
|
|
||||||
<div id="synapse_cache_size"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_cache_size"),
|
|
||||||
expr: "synapse_util_caches_cache:size",
|
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "",
|
|
||||||
yTitle: "Items"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h1>Requests</h1>
|
|
||||||
|
|
||||||
<h3>Requests by Servlet</h3>
|
|
||||||
<div id="synapse_http_server_request_count_servlet"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
<h4> (without <tt>EventStreamRestServlet</tt> or <tt>SyncRestServlet</tt>)</h4>
|
|
||||||
<div id="synapse_http_server_request_count_servlet_minus_events"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Average response times</h3>
|
|
||||||
<div id="synapse_http_server_response_time_avg"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/req",
|
|
||||||
yTitle: "Response time"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>All responses by code</h3>
|
|
||||||
<div id="synapse_http_server_responses"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_responses"),
|
|
||||||
expr: "rate(synapse_http_server_responses[2m])",
|
|
||||||
name: "[[method]] / [[code]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Error responses by code</h3>
|
|
||||||
<div id="synapse_http_server_responses_err"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_responses_err"),
|
|
||||||
expr: "rate(synapse_http_server_responses{code=~\"[45]..\"}[2m])",
|
|
||||||
name: "[[method]] / [[code]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
|
|
||||||
<h3>CPU Usage</h3>
|
|
||||||
<div id="synapse_http_server_response_ru_utime"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
|
||||||
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/s",
|
|
||||||
yTitle: "CPU Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
|
|
||||||
<h3>DB Usage</h3>
|
|
||||||
<div id="synapse_http_server_response_db_txn_duration"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
|
||||||
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/s",
|
|
||||||
yTitle: "DB Usage"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
|
|
||||||
<h3>Average event send times</h3>
|
|
||||||
<div id="synapse_http_server_send_time_avg"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "s/req",
|
|
||||||
yTitle: "Response time"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h1>Federation</h1>
|
|
||||||
|
|
||||||
<h3>Sent Messages</h3>
|
|
||||||
<div id="synapse_federation_client_sent"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_federation_client_sent"),
|
|
||||||
expr: "rate(synapse_federation_client_sent[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Received Messages</h3>
|
|
||||||
<div id="synapse_federation_server_received"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_federation_server_received"),
|
|
||||||
expr: "rate(synapse_federation_server_received[2m])",
|
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "req/s",
|
|
||||||
yTitle: "Requests"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Pending</h3>
|
|
||||||
<div id="synapse_federation_transaction_queue_pending"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_federation_transaction_queue_pending"),
|
|
||||||
expr: "synapse_federation_transaction_queue_pending",
|
|
||||||
name: "[[type]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "",
|
|
||||||
yTitle: "Units"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h1>Clients</h1>
|
|
||||||
|
|
||||||
<h3>Notifiers</h3>
|
|
||||||
<div id="synapse_notifier_listeners"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_notifier_listeners"),
|
|
||||||
expr: "synapse_notifier_listeners",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
min: 0,
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
|
||||||
yUnits: "",
|
|
||||||
yTitle: "Listeners"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<h3>Notified Events</h3>
|
|
||||||
<div id="synapse_notifier_notified_events"></div>
|
|
||||||
<script>
|
|
||||||
new PromConsole.Graph({
|
|
||||||
node: document.querySelector("#synapse_notifier_notified_events"),
|
|
||||||
expr: "rate(synapse_notifier_notified_events[2m])",
|
|
||||||
name: "[[job]]-[[index]]",
|
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
|
||||||
yUnits: "events/s",
|
|
||||||
yTitle: "Event rate"
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
{{ template "prom_content_tail" . }}
|
|
||||||
|
|
||||||
{{ template "tail" }}
|
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
groups:
|
|
||||||
- name: synapse
|
|
||||||
rules:
|
|
||||||
|
|
||||||
###
|
|
||||||
### Prometheus Console Only
|
|
||||||
### The following rules are only needed if you use the Prometheus Console
|
|
||||||
### in contrib/prometheus/consoles/synapse.html
|
|
||||||
###
|
|
||||||
- record: 'synapse_federation_client_sent'
|
|
||||||
labels:
|
|
||||||
type: "EDU"
|
|
||||||
expr: 'synapse_federation_client_sent_edus_total + 0'
|
|
||||||
- record: 'synapse_federation_client_sent'
|
|
||||||
labels:
|
|
||||||
type: "PDU"
|
|
||||||
expr: 'synapse_federation_client_sent_pdu_destinations_count_total + 0'
|
|
||||||
- record: 'synapse_federation_client_sent'
|
|
||||||
labels:
|
|
||||||
type: "Query"
|
|
||||||
expr: 'sum(synapse_federation_client_sent_queries) by (job)'
|
|
||||||
|
|
||||||
- record: 'synapse_federation_server_received'
|
|
||||||
labels:
|
|
||||||
type: "EDU"
|
|
||||||
expr: 'synapse_federation_server_received_edus_total + 0'
|
|
||||||
- record: 'synapse_federation_server_received'
|
|
||||||
labels:
|
|
||||||
type: "PDU"
|
|
||||||
expr: 'synapse_federation_server_received_pdus_total + 0'
|
|
||||||
- record: 'synapse_federation_server_received'
|
|
||||||
labels:
|
|
||||||
type: "Query"
|
|
||||||
expr: 'sum(synapse_federation_server_received_queries) by (job)'
|
|
||||||
|
|
||||||
- record: 'synapse_federation_transaction_queue_pending'
|
|
||||||
labels:
|
|
||||||
type: "EDU"
|
|
||||||
expr: 'synapse_federation_transaction_queue_pending_edus + 0'
|
|
||||||
- record: 'synapse_federation_transaction_queue_pending'
|
|
||||||
labels:
|
|
||||||
type: "PDU"
|
|
||||||
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
|
||||||
###
|
|
||||||
### End of 'Prometheus Console Only' rules block
|
|
||||||
###
|
|
||||||
|
|
||||||
|
|
||||||
###
|
|
||||||
### Grafana Only
|
|
||||||
### The following rules are only needed if you use the Grafana dashboard
|
|
||||||
### in contrib/grafana/synapse.json
|
|
||||||
###
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
|
|
||||||
labels:
|
|
||||||
type: remote
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: local
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: bridges
|
|
||||||
|
|
||||||
- record: synapse_storage_events_persisted_by_event_type
|
|
||||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
|
|
||||||
|
|
||||||
- record: synapse_storage_events_persisted_by_origin
|
|
||||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
|
|
||||||
###
|
|
||||||
### End of 'Grafana Only' rules block
|
|
||||||
###
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
Purge history API examples
|
|
||||||
==========================
|
|
||||||
|
|
||||||
# `purge_history.sh`
|
|
||||||
|
|
||||||
A bash file, that uses the
|
|
||||||
[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
|
|
||||||
to purge all messages in a list of rooms up to a certain event. You can select a
|
|
||||||
timeframe or a number of messages that you want to keep in the room.
|
|
||||||
|
|
||||||
Just configure the variables DOMAIN, ADMIN, ROOMS_ARRAY and TIME at the top of
|
|
||||||
the script.
|
|
||||||
|
|
||||||
# `purge_remote_media.sh`
|
|
||||||
|
|
||||||
A bash file, that uses the
|
|
||||||
[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
|
|
||||||
to purge all old cached remote media.
|
|
||||||
@@ -1,143 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# this script will use the api:
|
|
||||||
# https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html
|
|
||||||
#
|
|
||||||
# It will purge all messages in a list of rooms up to a cetrain event
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# define your domain and admin user
|
|
||||||
###################################################################################################
|
|
||||||
# add this user as admin in your home server:
|
|
||||||
DOMAIN=yourserver.tld
|
|
||||||
# add this user as admin in your home server:
|
|
||||||
ADMIN="@you_admin_username:$DOMAIN"
|
|
||||||
|
|
||||||
API_URL="$DOMAIN:8008/_matrix/client/r0"
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
#choose the rooms to prune old messages from (add a free comment at the end)
|
|
||||||
###################################################################################################
|
|
||||||
# the room_id's you can get e.g. from your Riot clients "View Source" button on each message
|
|
||||||
ROOMS_ARRAY=(
|
|
||||||
'!DgvjtOljKujDBrxyHk:matrix.org#riot:matrix.org'
|
|
||||||
'!QtykxKocfZaZOUrTwp:matrix.org#Matrix HQ'
|
|
||||||
)
|
|
||||||
|
|
||||||
# ALTERNATIVELY:
|
|
||||||
# you can select all the rooms that are not encrypted and loop over the result:
|
|
||||||
# SELECT room_id FROM rooms WHERE room_id NOT IN (SELECT DISTINCT room_id FROM events WHERE type ='m.room.encrypted')
|
|
||||||
# or
|
|
||||||
# select all rooms with at least 100 members:
|
|
||||||
# SELECT q.room_id FROM (select count(*) as numberofusers, room_id FROM current_state_events WHERE type ='m.room.member'
|
|
||||||
# GROUP BY room_id) AS q LEFT JOIN room_aliases a ON q.room_id=a.room_id WHERE q.numberofusers > 100 ORDER BY numberofusers desc
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# evaluate the EVENT_ID before which should be pruned
|
|
||||||
###################################################################################################
|
|
||||||
# choose a time before which the messages should be pruned:
|
|
||||||
TIME='12 months ago'
|
|
||||||
# ALTERNATIVELY:
|
|
||||||
# a certain time:
|
|
||||||
# TIME='2016-08-31 23:59:59'
|
|
||||||
|
|
||||||
# creates a timestamp from the given time string:
|
|
||||||
UNIX_TIMESTAMP=$(date +%s%3N --date='TZ="UTC+2" '"$TIME")
|
|
||||||
|
|
||||||
# ALTERNATIVELY:
|
|
||||||
# prune all messages that are older than 1000 messages ago:
|
|
||||||
# LAST_MESSAGES=1000
|
|
||||||
# SQL_GET_EVENT="SELECT event_id from events WHERE type='m.room.message' AND room_id ='$ROOM' ORDER BY received_ts DESC LIMIT 1 offset $(($LAST_MESSAGES - 1))"
|
|
||||||
|
|
||||||
# ALTERNATIVELY:
|
|
||||||
# select the EVENT_ID manually:
|
|
||||||
#EVENT_ID='$1471814088343495zpPNI:matrix.org' # an example event from 21st of Aug 2016 by Matthew
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# make the admin user a server admin in the database with
|
|
||||||
###################################################################################################
|
|
||||||
# psql -A -t --dbname=synapse -c "UPDATE users SET admin=1 WHERE name LIKE '$ADMIN'"
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# database function
|
|
||||||
###################################################################################################
|
|
||||||
sql (){
|
|
||||||
# for sqlite3:
|
|
||||||
#sqlite3 homeserver.db "pragma busy_timeout=20000;$1" | awk '{print $2}'
|
|
||||||
# for postgres:
|
|
||||||
psql -A -t --dbname=synapse -c "$1" | grep -v 'Pager'
|
|
||||||
}
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# get an access token
|
|
||||||
###################################################################################################
|
|
||||||
# for example externally by watching Riot in your browser's network inspector
|
|
||||||
# or internally on the server locally, use this:
|
|
||||||
TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id DESC LIMIT 1")
|
|
||||||
AUTH="Authorization: Bearer $TOKEN"
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# check, if your TOKEN works. For example this works:
|
|
||||||
###################################################################################################
|
|
||||||
# $ curl --header "$AUTH" "$API_URL/rooms/$ROOM/state/m.room.power_levels"
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# finally start pruning the room:
|
|
||||||
###################################################################################################
|
|
||||||
# this will really delete local events, so the messages in the room really
|
|
||||||
# disappear unless they are restored by remote federation. This is because
|
|
||||||
# we pass {"delete_local_events":true} to the curl invocation below.
|
|
||||||
|
|
||||||
for ROOM in "${ROOMS_ARRAY[@]}"; do
|
|
||||||
echo "########################################### $(date) ################# "
|
|
||||||
echo "pruning room: $ROOM ..."
|
|
||||||
ROOM=${ROOM%#*}
|
|
||||||
#set -x
|
|
||||||
echo "check for alias in db..."
|
|
||||||
# for postgres:
|
|
||||||
sql "SELECT * FROM room_aliases WHERE room_id='$ROOM'"
|
|
||||||
echo "get event..."
|
|
||||||
# for postgres:
|
|
||||||
EVENT_ID=$(sql "SELECT event_id FROM events WHERE type='m.room.message' AND received_ts<'$UNIX_TIMESTAMP' AND room_id='$ROOM' ORDER BY received_ts DESC LIMIT 1;")
|
|
||||||
if [ "$EVENT_ID" == "" ]; then
|
|
||||||
echo "no event $TIME"
|
|
||||||
else
|
|
||||||
echo "event: $EVENT_ID"
|
|
||||||
SLEEP=2
|
|
||||||
set -x
|
|
||||||
# call purge
|
|
||||||
OUT=$(curl --header "$AUTH" -s -d '{"delete_local_events":true}' POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
|
|
||||||
PURGE_ID=$(echo "$OUT" |grep purge_id|cut -d'"' -f4 )
|
|
||||||
if [ "$PURGE_ID" == "" ]; then
|
|
||||||
# probably the history purge is already in progress for $ROOM
|
|
||||||
: "continuing with next room"
|
|
||||||
else
|
|
||||||
while : ; do
|
|
||||||
# get status of purge and sleep longer each time if still active
|
|
||||||
sleep $SLEEP
|
|
||||||
STATUS=$(curl --header "$AUTH" -s GET "$API_URL/admin/purge_history_status/$PURGE_ID" |grep status|cut -d'"' -f4)
|
|
||||||
: "$ROOM --> Status: $STATUS"
|
|
||||||
[[ "$STATUS" == "active" ]] || break
|
|
||||||
SLEEP=$((SLEEP + 1))
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
set +x
|
|
||||||
sleep 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# additionally
|
|
||||||
###################################################################################################
|
|
||||||
# to benefit from pruning large amounts of data, you need to call VACUUM to free the unused space.
|
|
||||||
# This can take a very long time (hours) and the client have to be stopped while you do so:
|
|
||||||
# $ synctl stop
|
|
||||||
# $ sqlite3 -line homeserver.db "vacuum;"
|
|
||||||
# $ synctl start
|
|
||||||
|
|
||||||
# This could be set, so you don't need to prune every time after deleting some rows:
|
|
||||||
# $ sqlite3 homeserver.db "PRAGMA auto_vacuum = FULL;"
|
|
||||||
# be cautious, it could make the database somewhat slow if there are a lot of deletions
|
|
||||||
|
|
||||||
exit
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
DOMAIN=yourserver.tld
|
|
||||||
# add this user as admin in your home server:
|
|
||||||
ADMIN="@you_admin_username:$DOMAIN"
|
|
||||||
|
|
||||||
API_URL="$DOMAIN:8008/_matrix/client/r0"
|
|
||||||
|
|
||||||
# choose a time before which the messages should be pruned:
|
|
||||||
# TIME='2016-08-31 23:59:59'
|
|
||||||
TIME='12 months ago'
|
|
||||||
|
|
||||||
# creates a timestamp from the given time string:
|
|
||||||
UNIX_TIMESTAMP=$(date +%s%3N --date='TZ="UTC+2" '"$TIME")
|
|
||||||
|
|
||||||
|
|
||||||
###################################################################################################
|
|
||||||
# database function
|
|
||||||
###################################################################################################
|
|
||||||
sql (){
|
|
||||||
# for sqlite3:
|
|
||||||
#sqlite3 homeserver.db "pragma busy_timeout=20000;$1" | awk '{print $2}'
|
|
||||||
# for postgres:
|
|
||||||
psql -A -t --dbname=synapse -c "$1" | grep -v 'Pager'
|
|
||||||
}
|
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# make the admin user a server admin in the database with
|
|
||||||
###############################################################################
|
|
||||||
# sql "UPDATE users SET admin=1 WHERE name LIKE '$ADMIN'"
|
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# get an access token
|
|
||||||
###############################################################################
|
|
||||||
# for example externally by watching Riot in your browser's network inspector
|
|
||||||
# or internally on the server locally, use this:
|
|
||||||
TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id DESC LIMIT 1")
|
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# check, if your TOKEN works. For example this works:
|
|
||||||
###############################################################################
|
|
||||||
# curl --header "Authorization: Bearer $TOKEN" "$API_URL/rooms/$ROOM/state/m.room.power_levels"
|
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# optional check size before
|
|
||||||
###############################################################################
|
|
||||||
# echo calculate used storage before ...
|
|
||||||
# du -shc ../.synapse/media_store/*
|
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# finally start pruning media:
|
|
||||||
###############################################################################
|
|
||||||
set -x # for debugging the generated string
|
|
||||||
curl --header "Authorization: Bearer $TOKEN" -X POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
name: matrix-synapse
|
|
||||||
base: core18
|
|
||||||
version: git
|
|
||||||
summary: Reference Matrix homeserver
|
|
||||||
description: |
|
|
||||||
Synapse is the reference Matrix homeserver.
|
|
||||||
Matrix is a federated and decentralised instant messaging and VoIP system.
|
|
||||||
|
|
||||||
grade: stable
|
|
||||||
confinement: strict
|
|
||||||
|
|
||||||
apps:
|
|
||||||
matrix-synapse:
|
|
||||||
command: synctl --no-daemonize start $SNAP_COMMON/homeserver.yaml
|
|
||||||
stop-command: synctl -c $SNAP_COMMON stop
|
|
||||||
plugs: [network-bind, network]
|
|
||||||
daemon: simple
|
|
||||||
hash-password:
|
|
||||||
command: hash_password
|
|
||||||
generate-config:
|
|
||||||
command: generate_config
|
|
||||||
generate-signing-key:
|
|
||||||
command: generate_signing_key
|
|
||||||
register-new-matrix-user:
|
|
||||||
command: register_new_matrix_user
|
|
||||||
plugs: [network]
|
|
||||||
synctl:
|
|
||||||
command: synctl
|
|
||||||
parts:
|
|
||||||
matrix-synapse:
|
|
||||||
source: .
|
|
||||||
plugin: python
|
|
||||||
python-version: python3
|
|
||||||
python-packages:
|
|
||||||
- '.[all]'
|
|
||||||
- pip
|
|
||||||
- setuptools
|
|
||||||
- setuptools-scm
|
|
||||||
- wheel
|
|
||||||
build-packages:
|
|
||||||
- libffi-dev
|
|
||||||
- libturbojpeg0-dev
|
|
||||||
- libssl-dev
|
|
||||||
- libxslt1-dev
|
|
||||||
- libpq-dev
|
|
||||||
- zlib1g-dev
|
|
||||||
stage-packages:
|
|
||||||
- libasn1-8-heimdal
|
|
||||||
- libgssapi3-heimdal
|
|
||||||
- libhcrypto4-heimdal
|
|
||||||
- libheimbase1-heimdal
|
|
||||||
- libheimntlm0-heimdal
|
|
||||||
- libhx509-5-heimdal
|
|
||||||
- libkrb5-26-heimdal
|
|
||||||
- libldap-2.4-2
|
|
||||||
- libpq5
|
|
||||||
- libsasl2-2
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
The documentation for using systemd to manage synapse workers is now part of
|
|
||||||
the main synapse distribution. See
|
|
||||||
[docs/systemd-with-workers](https://matrix-org.github.io/synapse/latest/systemd-with-workers/index.html).
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# Setup Synapse with Systemd
|
|
||||||
This is a setup for managing synapse with a user contributed systemd unit
|
|
||||||
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
|
||||||
to accommodate your installation in accordance with the installation
|
|
||||||
instructions provided in
|
|
||||||
[installation instructions](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
|
||||||
|
|
||||||
## Setup
|
|
||||||
1. Under the service section, ensure the `User` variable matches which user
|
|
||||||
you installed synapse under and wish to run it as.
|
|
||||||
2. Under the service section, ensure the `WorkingDirectory` variable matches
|
|
||||||
where you have installed synapse.
|
|
||||||
3. Under the service section, ensure the `ExecStart` variable matches the
|
|
||||||
appropriate locations of your installation.
|
|
||||||
4. Copy the `matrix-synapse.service` to `/etc/systemd/system/`
|
|
||||||
5. Start Synapse: `sudo systemctl start matrix-synapse`
|
|
||||||
6. Verify Synapse is running: `sudo systemctl status matrix-synapse`
|
|
||||||
7. *optional* Enable Synapse to start at system boot: `sudo systemctl enable matrix-synapse`
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
version: 1
|
|
||||||
|
|
||||||
# In systemd's journal, loglevel is implicitly stored, so let's omit it
|
|
||||||
# from the message text.
|
|
||||||
formatters:
|
|
||||||
journal_fmt:
|
|
||||||
format: '%(name)s: [%(request)s] %(message)s'
|
|
||||||
|
|
||||||
filters:
|
|
||||||
context:
|
|
||||||
(): synapse.logging.context.LoggingContextFilter
|
|
||||||
request: ""
|
|
||||||
|
|
||||||
handlers:
|
|
||||||
journal:
|
|
||||||
class: systemd.journal.JournalHandler
|
|
||||||
formatter: journal_fmt
|
|
||||||
filters: [context]
|
|
||||||
SYSLOG_IDENTIFIER: synapse
|
|
||||||
|
|
||||||
root:
|
|
||||||
level: INFO
|
|
||||||
handlers: [journal]
|
|
||||||
|
|
||||||
disable_existing_loggers: False
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
# Example systemd configuration file for synapse. Copy into
|
|
||||||
# /etc/systemd/system/, update the paths if necessary, then:
|
|
||||||
#
|
|
||||||
# systemctl enable matrix-synapse
|
|
||||||
# systemctl start matrix-synapse
|
|
||||||
#
|
|
||||||
# This assumes that Synapse has been installed by a user named
|
|
||||||
# synapse.
|
|
||||||
#
|
|
||||||
# This assumes that Synapse has been installed in a virtualenv in
|
|
||||||
# the user's home directory: `/home/synapse/synapse/env`.
|
|
||||||
#
|
|
||||||
# **NOTE:** This is an example service file that may change in the future. If you
|
|
||||||
# wish to use this please copy rather than symlink it.
|
|
||||||
|
|
||||||
[Unit]
|
|
||||||
Description=Synapse Matrix homeserver
|
|
||||||
# If you are using postgresql to persist data, uncomment this line to make sure
|
|
||||||
# synapse starts after the postgresql service.
|
|
||||||
# After=postgresql.service
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=notify
|
|
||||||
NotifyAccess=main
|
|
||||||
ExecReload=/bin/kill -HUP $MAINPID
|
|
||||||
Restart=on-abort
|
|
||||||
|
|
||||||
User=synapse
|
|
||||||
Group=nogroup
|
|
||||||
|
|
||||||
WorkingDirectory=/home/synapse/synapse
|
|
||||||
ExecStart=/home/synapse/synapse/env/bin/python -m synapse.app.homeserver --config-path=/home/synapse/synapse/homeserver.yaml
|
|
||||||
SyslogIdentifier=matrix-synapse
|
|
||||||
|
|
||||||
# adjust the cache factor if necessary
|
|
||||||
# Environment=SYNAPSE_CACHE_FACTOR=2.0
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
[Service]
|
|
||||||
# The following directives give the synapse service R/W access to:
|
|
||||||
# - /run/matrix-synapse
|
|
||||||
# - /var/lib/matrix-synapse
|
|
||||||
# - /var/log/matrix-synapse
|
|
||||||
|
|
||||||
RuntimeDirectory=matrix-synapse
|
|
||||||
StateDirectory=matrix-synapse
|
|
||||||
LogsDirectory=matrix-synapse
|
|
||||||
|
|
||||||
######################
|
|
||||||
## Security Sandbox ##
|
|
||||||
######################
|
|
||||||
|
|
||||||
# Make sure that the service has its own unshared tmpfs at /tmp and that it
|
|
||||||
# cannot see or change any real devices
|
|
||||||
PrivateTmp=true
|
|
||||||
PrivateDevices=true
|
|
||||||
|
|
||||||
# We give no capabilities to a service by default
|
|
||||||
CapabilityBoundingSet=
|
|
||||||
AmbientCapabilities=
|
|
||||||
|
|
||||||
# Protect the following from modification:
|
|
||||||
# - The entire filesystem
|
|
||||||
# - sysctl settings and loaded kernel modules
|
|
||||||
# - No modifications allowed to Control Groups
|
|
||||||
# - Hostname
|
|
||||||
# - System Clock
|
|
||||||
ProtectSystem=strict
|
|
||||||
ProtectKernelTunables=true
|
|
||||||
ProtectKernelModules=true
|
|
||||||
ProtectControlGroups=true
|
|
||||||
ProtectClock=true
|
|
||||||
ProtectHostname=true
|
|
||||||
|
|
||||||
# Prevent access to the following:
|
|
||||||
# - /home directory
|
|
||||||
# - Kernel logs
|
|
||||||
ProtectHome=tmpfs
|
|
||||||
ProtectKernelLogs=true
|
|
||||||
|
|
||||||
# Make sure that the process can only see PIDs and process details of itself,
|
|
||||||
# and the second option disables seeing details of things like system load and
|
|
||||||
# I/O etc
|
|
||||||
ProtectProc=invisible
|
|
||||||
ProcSubset=pid
|
|
||||||
|
|
||||||
# While not needed, we set these options explicitly
|
|
||||||
# - This process has been given access to the host network
|
|
||||||
# - It can also communicate with any IP Address
|
|
||||||
PrivateNetwork=false
|
|
||||||
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
|
|
||||||
IPAddressAllow=any
|
|
||||||
|
|
||||||
# Restrict system calls to a sane bunch
|
|
||||||
SystemCallArchitectures=native
|
|
||||||
SystemCallFilter=@system-service
|
|
||||||
SystemCallFilter=~@privileged @resources @obsolete
|
|
||||||
|
|
||||||
# Misc restrictions
|
|
||||||
# - Since the process is a python process it needs to be able to write and
|
|
||||||
# execute memory regions, so we set MemoryDenyWriteExecute to false
|
|
||||||
RestrictSUIDSGID=true
|
|
||||||
RemoveIPC=true
|
|
||||||
NoNewPrivileges=true
|
|
||||||
RestrictRealtime=true
|
|
||||||
RestrictNamespaces=true
|
|
||||||
LockPersonality=true
|
|
||||||
PrivateUsers=true
|
|
||||||
MemoryDenyWriteExecute=false
|
|
||||||
@@ -126,26 +126,12 @@ sub on_unknown_event
|
|||||||
if (!$bridgestate->{$room_id}->{gathered_candidates}) {
|
if (!$bridgestate->{$room_id}->{gathered_candidates}) {
|
||||||
$bridgestate->{$room_id}->{gathered_candidates} = 1;
|
$bridgestate->{$room_id}->{gathered_candidates} = 1;
|
||||||
my $offer = $bridgestate->{$room_id}->{offer};
|
my $offer = $bridgestate->{$room_id}->{offer};
|
||||||
my $candidate_block = {
|
my $candidate_block = "";
|
||||||
audio => '',
|
|
||||||
video => '',
|
|
||||||
};
|
|
||||||
foreach (@{$event->{content}->{candidates}}) {
|
foreach (@{$event->{content}->{candidates}}) {
|
||||||
if ($_->{sdpMid}) {
|
$candidate_block .= "a=" . $_->{candidate} . "\r\n";
|
||||||
$candidate_block->{$_->{sdpMid}} .= "a=" . $_->{candidate} . "\r\n";
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
$candidate_block->{audio} .= "a=" . $_->{candidate} . "\r\n";
|
|
||||||
$candidate_block->{video} .= "a=" . $_->{candidate} . "\r\n";
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
# XXX: collate using the right m= line - for now assume audio call
|
||||||
# XXX: assumes audio comes first
|
$offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;
|
||||||
#$offer =~ s/(a=rtcp-mux[\r\n]+)/$1$candidate_block->{audio}/;
|
|
||||||
#$offer =~ s/(a=rtcp-mux[\r\n]+)/$1$candidate_block->{video}/;
|
|
||||||
|
|
||||||
$offer =~ s/(m=video)/$candidate_block->{audio}$1/;
|
|
||||||
$offer =~ s/(.$)/$1\n$candidate_block->{video}$1/;
|
|
||||||
|
|
||||||
my $f = send_verto_json_request("verto.invite", {
|
my $f = send_verto_json_request("verto.invite", {
|
||||||
"sdp" => $offer,
|
"sdp" => $offer,
|
||||||
@@ -186,18 +172,23 @@ sub on_room_message
|
|||||||
warn "[Matrix] in $room_id: $from: " . $content->{body} . "\n";
|
warn "[Matrix] in $room_id: $from: " . $content->{body} . "\n";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
my $verto_connecting = $loop->new_future;
|
||||||
|
$bot_verto->connect(
|
||||||
|
%{ $CONFIG{"verto-bot"} },
|
||||||
|
on_connected => sub {
|
||||||
|
warn("[Verto] connected to websocket");
|
||||||
|
$verto_connecting->done($bot_verto) if not $verto_connecting->is_done;
|
||||||
|
},
|
||||||
|
on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
|
||||||
|
on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
|
||||||
|
);
|
||||||
|
|
||||||
Future->needs_all(
|
Future->needs_all(
|
||||||
$bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub {
|
$bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub {
|
||||||
$bot_matrix->start;
|
$bot_matrix->start;
|
||||||
}),
|
}),
|
||||||
|
|
||||||
$bot_verto->connect(
|
$verto_connecting,
|
||||||
%{ $CONFIG{"verto-bot"} },
|
|
||||||
on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
|
|
||||||
on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
|
|
||||||
)->on_done( sub {
|
|
||||||
warn("[Verto] connected to websocket");
|
|
||||||
}),
|
|
||||||
)->get;
|
)->get;
|
||||||
|
|
||||||
$loop->attach_signal(
|
$loop->attach_signal(
|
||||||
|
|||||||
@@ -86,7 +86,7 @@ sub create_virtual_user
|
|||||||
"user": "$localpart"
|
"user": "$localpart"
|
||||||
}
|
}
|
||||||
EOT
|
EOT
|
||||||
)->get;
|
)->get;
|
||||||
warn $response->as_string if ($response->code != 200);
|
warn $response->as_string if ($response->code != 200);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -266,21 +266,17 @@ my $as_url = $CONFIG{"matrix-bot"}->{as_url};
|
|||||||
|
|
||||||
Future->needs_all(
|
Future->needs_all(
|
||||||
$http->do_request(
|
$http->do_request(
|
||||||
method => "POST",
|
method => "POST",
|
||||||
uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ),
|
uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ),
|
||||||
content_type => "application/json",
|
content_type => "application/json",
|
||||||
content => <<EOT
|
content => <<EOT
|
||||||
{
|
{
|
||||||
"as_token": "$as_token",
|
"as_token": "$as_token",
|
||||||
"url": "$as_url",
|
"url": "$as_url",
|
||||||
"namespaces": { "users": [ { "regex": "\@\\\\+.*", "exclusive": false } ] }
|
"namespaces": { "users": ["\@\\\\+.*"] }
|
||||||
}
|
}
|
||||||
EOT
|
EOT
|
||||||
)->then( sub{
|
),
|
||||||
my ($response) = (@_);
|
|
||||||
warn $response->as_string if ($response->code != 200);
|
|
||||||
return Future->done;
|
|
||||||
}),
|
|
||||||
$verto_connecting,
|
$verto_connecting,
|
||||||
)->get;
|
)->get;
|
||||||
|
|
||||||
|
|||||||
@@ -7,9 +7,6 @@ matrix:
|
|||||||
matrix-bot:
|
matrix-bot:
|
||||||
user_id: '@vertobot:matrix.org'
|
user_id: '@vertobot:matrix.org'
|
||||||
password: ''
|
password: ''
|
||||||
domain: 'matrix.org"
|
|
||||||
as_url: 'http://localhost:8009'
|
|
||||||
as_token: 'vertobot123'
|
|
||||||
|
|
||||||
verto-bot:
|
verto-bot:
|
||||||
host: webrtc.freeswitch.org
|
host: webrtc.freeswitch.org
|
||||||
|
|||||||
@@ -11,4 +11,7 @@ requires 'YAML', 0;
|
|||||||
requires 'JSON', 0;
|
requires 'JSON', 0;
|
||||||
requires 'Getopt::Long', 0;
|
requires 'Getopt::Long', 0;
|
||||||
|
|
||||||
|
on 'test' => sub {
|
||||||
|
requires 'Test::More', '>= 0.98';
|
||||||
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -1,33 +0,0 @@
|
|||||||
# Creating multiple generic workers with a bash script
|
|
||||||
|
|
||||||
Setting up multiple worker configuration files manually can be time-consuming.
|
|
||||||
You can alternatively create multiple worker configuration files with a simple `bash` script. For example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
#!/bin/bash
|
|
||||||
for i in {1..5}
|
|
||||||
do
|
|
||||||
cat << EOF > generic_worker$i.yaml
|
|
||||||
worker_app: synapse.app.generic_worker
|
|
||||||
worker_name: generic_worker$i
|
|
||||||
|
|
||||||
# The replication listener on the main synapse process.
|
|
||||||
worker_replication_host: 127.0.0.1
|
|
||||||
worker_replication_http_port: 9093
|
|
||||||
|
|
||||||
worker_main_http_uri: http://localhost:8008/
|
|
||||||
|
|
||||||
worker_listeners:
|
|
||||||
- type: http
|
|
||||||
port: 808$i
|
|
||||||
resources:
|
|
||||||
- names: [client, federation]
|
|
||||||
|
|
||||||
worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
|
|
||||||
EOF
|
|
||||||
done
|
|
||||||
```
|
|
||||||
|
|
||||||
This would create five generic workers with a unique `worker_name` field in each file and listening on ports 8081-8085.
|
|
||||||
|
|
||||||
Customise the script to your needs.
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user