Compare commits


No commits in common. "develop" and "v1.69.0rc1" have entirely different histories.

1013 changed files with 38057 additions and 88084 deletions

View File

@@ -1,141 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
# compatible wheel, if so rename the wheel before repairing it.
import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile
from packaging.tags import Tag
from packaging.utils import parse_wheel_filename
from packaging.version import Version
def check_is_abi3_compatible(wheel_file: str) -> None:
"""Check the contents of the built wheel for any `.so` files that are *not*
abi3 compatible.
"""
with ZipFile(wheel_file, "r") as wheel:
for file in wheel.namelist():
if not file.endswith(".so"):
continue
if not file.endswith(".abi3.so"):
raise Exception(f"Found non-abi3 lib: {file}")
def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
"""Replaces the cpython wheel file with a ABI3 compatible wheel"""
if tag.abi == "abi3":
# Nothing to do.
return wheel_file
check_is_abi3_compatible(wheel_file)
# HACK: it seems that some older versions of pip will consider a wheel marked
# as macosx_11_0 as incompatible with Big Sur. I haven't done the full archaeology
# here; there are some clues in
# https://github.com/pantsbuild/pants/pull/12857
# https://github.com/pypa/pip/issues/9138
# https://github.com/pypa/packaging/pull/319
# Empirically this seems to work, note that macOS 11 and 10.16 are the same,
# both versions are valid for backwards compatibility.
platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
abi3_tag = Tag(tag.interpreter, "abi3", platform)
dirname = os.path.dirname(wheel_file)
new_wheel_file = os.path.join(
dirname,
f"{name}-{version}-{abi3_tag}.whl",
)
os.rename(wheel_file, new_wheel_file)
print("Renamed wheel to", new_wheel_file)
return new_wheel_file
def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
"""Entry point"""
# Parse the wheel file name into its parts. Note that `parse_wheel_filename`
# normalizes the package name (i.e. it converts matrix_synapse ->
# matrix-synapse), which is not what we want.
_, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
name = os.path.basename(wheel_file).split("-")[0]
if len(tags) != 1:
# We expect only a wheel file with only a single tag
raise Exception(f"Unexpectedly found multiple tags: {tags}")
tag = next(iter(tags))
if build:
# We don't use build tags in Synapse
raise Exception(f"Unexpected build tag: {build}")
# If the wheel is for cpython then convert it into an abi3 wheel.
if tag.interpreter.startswith("cp"):
wheel_file = cpython(wheel_file, name, version, tag)
# Finally, repair the wheel.
if archs is not None:
# If we are given archs then we are on macos and need to use
# `delocate-listdeps`.
subprocess.run(["delocate-listdeps", wheel_file], check=True)
subprocess.run(
["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
check=True,
)
else:
subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
parser.add_argument(
"--wheel-dir",
"-w",
metavar="WHEEL_DIR",
help="Directory to store delocated wheels",
required=True,
)
parser.add_argument(
"--require-archs",
metavar="archs",
default=None,
)
parser.add_argument(
"wheel_file",
metavar="WHEEL_FILE",
)
args = parser.parse_args()
wheel_file = args.wheel_file
wheel_dir = args.wheel_dir
archs = args.require_archs
main(wheel_file, wheel_dir, archs)
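For reference, the renaming logic above can be exercised in isolation. The sketch below walks a hypothetical cpython wheel name through the same steps (the filename is made up for illustration; only the `packaging` library is required):

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename

# Hypothetical wheel filename, for illustration only.
filename = "matrix_synapse-1.69.0rc1-cp37-cp37m-macosx_11_0_x86_64.whl"

_, version, _, tags = parse_wheel_filename(filename)
tag = next(iter(tags))

# Apply the same macosx_11_0 -> macosx_10_16 workaround as the script.
platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
abi3_tag = Tag(tag.interpreter, "abi3", platform)

# Like the script, take the name from the filename itself, since
# parse_wheel_filename would normalize matrix_synapse -> matrix-synapse.
name = filename.split("-")[0]
print(f"{name}-{version}-{abi3_tag}.whl")
# -> matrix_synapse-1.69.0rc1-cp37-abi3-macosx_10_16_x86_64.whl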

View File

@@ -18,23 +18,15 @@
 import json
 import os

-def set_output(key: str, value: str):
-    # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
-    with open(os.environ["GITHUB_OUTPUT"], "at") as f:
-        print(f"{key}={value}", file=f)

 IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest Python version supported (which
-# is Python 3.8 right now)
+# For each type of test we only run on Py3.7 on PRs

 trial_sqlite_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "sqlite",
         "extras": "all",
     }

@@ -47,14 +39,15 @@ if not IS_PR:
         "database": "sqlite",
         "extras": "all",
     }
-    for version in ("3.9", "3.10", "3.11", "3.12")
+    for version in ("3.8", "3.9", "3.10")
 )

 trial_postgres_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "postgres",
-        "postgres-version": "11",
+        "postgres-version": "10",
         "extras": "all",
     }
 ]

@@ -62,16 +55,16 @@ trial_postgres_tests = [
 if not IS_PR:
     trial_postgres_tests.append(
         {
-            "python-version": "3.12",
+            "python-version": "3.10",
             "database": "postgres",
-            "postgres-version": "16",
+            "postgres-version": "14",
             "extras": "all",
         }
     )

 trial_no_extra_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "sqlite",
         "extras": "",
     }

@@ -88,7 +81,7 @@ print("::endgroup::")
 test_matrix = json.dumps(
     trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
 )
-set_output("trial_test_matrix", test_matrix)
+print(f"::set-output name=trial_test_matrix::{test_matrix}")

 # First calculate the various sytest jobs.

@@ -109,30 +102,20 @@ sytest_tests = [
         "postgres": "multi-postgres",
         "workers": "workers",
     },
-    {
-        "sytest-tag": "focal",
-        "postgres": "multi-postgres",
-        "workers": "workers",
-        "reactor": "asyncio",
-    },
 ]

 if not IS_PR:
     sytest_tests.extend(
         [
-            {
-                "sytest-tag": "focal",
-                "reactor": "asyncio",
-            },
-            {
-                "sytest-tag": "focal",
-                "postgres": "postgres",
-                "reactor": "asyncio",
-            },
             {
                 "sytest-tag": "testing",
                 "postgres": "postgres",
             },
+            {
+                "sytest-tag": "buster",
+                "postgres": "multi-postgres",
+                "workers": "workers",
+            },
         ]
     )

@@ -142,4 +125,4 @@ print(json.dumps(sytest_tests, indent=4))
 print("::endgroup::")

 test_matrix = json.dumps(sytest_tests)
-set_output("sytest_test_matrix", test_matrix)
+print(f"::set-output name=sytest_test_matrix::{test_matrix}")

View File

@@ -1,23 +0,0 @@
#! /usr/bin/env python
import sys
if sys.version_info < (3, 11):
raise RuntimeError("Requires at least Python 3.11, to import tomllib")
import tomllib
with open("poetry.lock", "rb") as f:
lockfile = tomllib.load(f)
try:
lock_version = lockfile["metadata"]["lock-version"]
assert lock_version == "2.0"
except Exception:
print(
"""\
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
https://matrix-org.github.io/synapse/develop/development/dependencies.html
"""
)
raise
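To see what the script expects to find, here is a minimal sketch of the metadata table it reads (the TOML fragment is illustrative, not a real lockfile):

import tomllib

sample = '''
[metadata]
lock-version = "2.0"
'''

lockfile = tomllib.loads(sample)
assert lockfile["metadata"]["lock-version"] == "2.0"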

View File

@@ -31,6 +31,34 @@ sed -i \
   -e '/systemd/d' \
   pyproject.toml

+# Use poetry to do the installation. This ensures that the versions are all mutually
+# compatible (as far the package metadata declares, anyway); pip's package resolver
+# is more lax.
+#
+# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
+# toml file. This means we don't have to ensure compatibility between old deps and
+# dev tools.
+pip install toml wheel
+
+REMOVE_DEV_DEPENDENCIES="
+import toml
+with open('pyproject.toml', 'r') as f:
+    data = toml.loads(f.read())
+
+del data['tool']['poetry']['dev-dependencies']
+
+with open('pyproject.toml', 'w') as f:
+    toml.dump(data, f)
+"
+python3 -c "$REMOVE_DEV_DEPENDENCIES"
+
+pip install poetry==1.2.0
+poetry lock
+
 echo "::group::Patched pyproject.toml"
 cat pyproject.toml
 echo "::endgroup::"
+echo "::group::Lockfile after patch"
+cat poetry.lock
+echo "::endgroup::"

View File

@@ -9,9 +9,19 @@ set -eu
 alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
 alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

+block Set Go Version
+# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
+# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
+# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
+echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
+# Add the Go path to the PATH: We need this so we can call gotestfmt
+echo "~/go/bin" >> $GITHUB_PATH
+endblock
+
 block Install Complement Dependencies
 sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
-go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
+go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
 endblock

 block Install custom gotestfmt template
View File

@@ -23,9 +23,8 @@ poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-dat
   --output-directory /tmp/export_data

 # Test that the output directory exists and contains the rooms directory
-dir_r="/tmp/export_data/rooms"
-dir_u="/tmp/export_data/user_data"
-if [ -d "$dir_r" ] && [ -d "$dir_u" ]; then
+dir="/tmp/export_data/rooms"
+if [ -d "$dir" ]; then
   echo "Command successful, this test passes"
 else
   echo "No output directories found, the command fails against a sqlite database."

@@ -44,9 +43,8 @@ poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-d
   --output-directory /tmp/export_data2

 # Test that the output directory exists and contains the rooms directory
-dir_r2="/tmp/export_data2/rooms"
-dir_u2="/tmp/export_data2/user_data"
-if [ -d "$dir_r2" ] && [ -d "$dir_u2" ]; then
+dir2="/tmp/export_data2/rooms"
+if [ -d "$dir2" ]; then
   echo "Command successful, this test passes"
 else
   echo "No output directories found, the command fails against a postgres database."

View File

@@ -9,7 +9,6 @@
 !pyproject.toml
 !poetry.lock
 !Cargo.lock
-!Cargo.toml
 !build_rust.py

 rust/target

View File

@@ -4,7 +4,7 @@
 root = true

 # 4 space indentation
-[*.{py,pyi}]
+[*.py]
 indent_style = space
 indent_size = 4
 max_line_length = 88

.flake8 (new file, +11 lines)
View File

@@ -0,0 +1,11 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
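As a concrete illustration of why W503/W504 are ignored: black formats long boolean expressions with the operator at the start of the continuation line, which would otherwise trip W503 (line break before binary operator). A tiny sketch, with made-up variable names:

# black produces this layout; with W503 enabled, flake8 would flag it.
is_ready = (
    database_is_up
    and migrations_have_run
    and not maintenance_mode
)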

View File

@@ -8,21 +8,17 @@
 # If ignoring a pull request that was not squash merged, only the merge
 # commit needs to be put here. Child commits will be resolved from it.

-# Run black (https://github.com/matrix-org/synapse/pull/3679).
+# Run black (#3679).
 8b3d9b6b199abb87246f982d5db356f1966db925

-# Black reformatting (https://github.com/matrix-org/synapse/pull/5482).
+# Black reformatting (#5482).
 32e7c9e7f20b57dd081023ac42d6931a8da9b3a3

-# Target Python 3.5 with black (https://github.com/matrix-org/synapse/pull/8664).
+# Target Python 3.5 with black (#8664).
 aff1eb7c671b0a3813407321d2702ec46c71fa56

-# Update black to 20.8b1 (https://github.com/matrix-org/synapse/pull/9381).
+# Update black to 20.8b1 (#9381).
 0a00b7ff14890987f09112a2ae696c61001e6cf1

-# Convert tests/rest/admin/test_room.py to unix file endings (https://github.com/matrix-org/synapse/pull/7953).
+# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
 c4268e3da64f1abb5b31deaeb5769adb6510c0a7
-
-# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
-9bb2eac71962970d02842bca441f4bcdbbf93a11

View File

@@ -74,36 +74,6 @@ body:
         - Debian packages from packages.matrix.org
         - pip (from PyPI)
         - Other (please mention below)
-        - I don't know
-    validations:
-      required: true
-  - type: input
-    id: database
-    attributes:
-      label: Database
-      description: |
-        Are you using SQLite or PostgreSQL? What's the version of your database?
-        If PostgreSQL, please also answer the following:
-         - are you using a single PostgreSQL server
-           or [separate servers for `main` and `state`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#databases)?
-         - have you previously ported from SQLite using the Synapse "portdb" script?
-         - have you previously restored from a backup?
-    validations:
-      required: true
-  - type: dropdown
-    id: workers
-    attributes:
-      label: Workers
-      description: |
-        Are you running a single Synapse process, or are you running
-        [2 or more workers](https://matrix-org.github.io/synapse/latest/workers.html)?
-      options:
-        - Single process
-        - Multiple workers
-        - I don't know
-    validations:
-      required: true
   - type: textarea
     id: platform
     attributes:
@@ -113,28 +83,17 @@ body:
         e.g. distro, hardware, if it's running in a vm/container, etc.
     validations:
       required: true
-  - type: textarea
-    id: config
-    attributes:
-      label: Configuration
-      description: |
-        Do you have any unusual config options turned on? If so, please provide details.
-        - Experimental or undocumented features
-        - [Presence](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#presence)
-        - [Message retention](https://matrix-org.github.io/synapse/latest/message_retention_policies.html)
-        - [Synapse modules](https://matrix-org.github.io/synapse/latest/modules/index.html)
   - type: textarea
     id: logs
     attributes:
       label: Relevant log output
       description: |
-        Please copy and paste any relevant log output as text (not images), ideally at INFO or DEBUG log level.
-        This will be automatically formatted into code, so there is no need for backticks (`\``).
+        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
+        This will be automatically formatted into code, so there is no need for backticks.

         Please be careful to remove any personal or private data.

-        **Bug reports are usually impossible to diagnose without logging.**
+        **Bug reports are usually very difficult to diagnose without logging.**
       render: shell
     validations:
       required: true

View File

@@ -15,9 +15,3 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
-
-  - package-ecosystem: "cargo"
-    directory: "/"
-    versioning-strategy: "lockfile-only"
-    schedule:
-      interval: "weekly"

View File

@@ -0,0 +1,46 @@
name: Write changelog for dependabot PR
on:
pull_request:
types:
- opened
- reopened # For debugging!
permissions:
# Needed to be able to push the commit. See
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
# for a similar example
contents: write
jobs:
add-changelog:
runs-on: 'ubuntu-latest'
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Write, commit and push changelog
run: |
echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
git add changelog.d
git config user.email "github-actions[bot]@users.noreply.github.com"
git config user.name "GitHub Actions"
git commit -m "Changelog"
git push
shell: bash
# The `git push` above does not trigger CI on the dependabot PR.
#
# By default, workflows can't trigger other workflows when they're just using the
# default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
# recursive workflow loops by accident, because that'll get very expensive very
# quickly.) Instead, you have to manually call out to another workflow, or else
# make your changes (i.e. the `git push` above) using a personal access token.
# See
# https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
#
# I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
# See git commit history for previous attempts. If anyone desperately wants to try
# again in the future, make a matrix-bot account and use its access token to git push.
# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
# are sufficiently locked down to dependabot only as above.

View File

@@ -10,7 +10,6 @@ on:

 permissions:
   contents: read
-  packages: write

 jobs:
   build:
@@ -18,47 +17,28 @@ jobs:
     steps:
       - name: Set up QEMU
         id: qemu
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@v2
         with:
           platforms: arm64

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2

       - name: Inspect builder
         run: docker buildx inspect

-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Extract version from pyproject.toml
-        # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
-        # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
-        shell: bash
-        run: |
-          echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
-
       - name: Log in to DockerHub
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: Log in to GHCR
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
       - name: Calculate docker image tag
         id: set-tag
         uses: docker/metadata-action@master
         with:
-          images: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/matrix-org/synapse
+          images: matrixdotorg/synapse
           flavor: |
             latest=false
           tags: |
@@ -68,17 +48,10 @@ jobs:
             type=pep440,pattern={{raw}}

       - name: Build and push all platforms
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v3
         with:
           push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          labels: "gitsha1=${{ github.sha }}"
           tags: "${{ steps.set-tag.outputs.tags }}"
           file: "docker/Dockerfile"
           platforms: linux/amd64,linux/arm64
-
-          # arm64 builds OOM without the git fetch setting. c.f.
-          # https://github.com/rust-lang/cargo/issues/10583
-          build-args: |
-            CARGO_NET_GIT_FETCH_WITH_CLI=true
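The removed 'Extract version from pyproject.toml' step shells out to grep and sed. A rough Python equivalent of that one-liner, assuming a top-level version = "..." line in pyproject.toml:

import re

with open("pyproject.toml") as f:
    for line in f:
        match = re.match(r'version\s*=\s*"([^"]*)"', line)
        if match:
            print(f"SYNAPSE_VERSION={match.group(1)}")
            break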

View File

@@ -1,90 +0,0 @@
name: Add Version Picker (RUN ONCE)
on:
workflow_dispatch:
jobs:
add-version-picker:
name: Add Version Picker
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Configure Git
run: |
git config user.email "action@synapse.bot.com"
git config user.name "Action Bot"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
with:
mdbook-version: '0.4.17'
- name: Copy files to release branches
run: |
for version in "v1.98" "v1.97" "v1.96" "v1.95" "v1.94" "v1.93" "v1.92" "v1.91" "v1.90" "v1.89" "v1.88" "v1.87" "v1.86" "v1.85" "v1.84" "v1.83" "v1.82" "v1.81" "v1.80" "v1.79" "v1.78" "v1.77" "v1.76" "v1.75" "v1.74" "v1.73" "v1.72" "v1.71" "v1.70" "v1.69" "v1.68" "v1.67" "v1.66" "v1.65" "v1.64" "v1.63" "v1.62" "v1.61" "v1.60" "v1.59" "v1.58" "v1.57" "v1.56" "v1.55" "v1.54" "v1.53" "v1.52" "v1.51" "v1.50" "v1.49" "v1.48" "v1.47" "v1.46" "v1.45" "v1.44" "v1.43" "v1.42" "v1.41" "v1.40" "v1.39" "v1.38" "v1.37"
do
git fetch
git checkout -b release-$version origin/release-$version
git checkout develop -- ./book.toml
git checkout develop -- ./docs/website_files/version-picker.js
git checkout develop -- ./docs/website_files/version-picker.css
git checkout develop -- ./docs/website_files/README.md
echo "window.SYNAPSE_VERSION = '$version';" > ./docs/website_files/version.js
# Adding version-picker element to index.hbs
awk '/<button id="search-toggle" class="icon-button" type="button" title="Search. \(Shortkey: s\)" aria-label="Toggle Searchbar" aria-expanded="false" aria-keyshortcuts="S" aria-controls="searchbar">/{
print; getline; print; getline; print; getline; print;
print "\
<div class=\"version-picker\">\n\
<div class=\"dropdown\">\n\
<div class=\"select\">\n\
<span></span>\n\
<i class=\"fa fa-chevron-down\"></i>\n\
</div>\n\
<input type=\"hidden\" name=\"version\">\n\
<ul class=\"dropdown-menu\">\n\
<!-- Versions will be added dynamically in version-picker.js -->\n\
</ul>\n\
</div>\n\
</div>\
";
next
} 1' ./docs/website_files/theme/index.hbs > output.html && mv output.html ./docs/website_files/theme/index.hbs
git add ./book.toml ./docs/website_files/version-picker.js ./docs/website_files/version-picker.css ./docs/website_files/version.js ./docs/website_files/README.md ./docs/website_files/theme/index.hbs
git commit -m "Version picker added for $version docs"
git push
done
- name: Build docs for Github Pages
run: |
git fetch
git branch gh-pages origin/gh-pages
for version in "v1.98" "v1.97" "v1.96" "v1.95" "v1.94" "v1.93" "v1.92" "v1.91" "v1.90" "v1.89" "v1.88" "v1.87" "v1.86" "v1.85" "v1.84" "v1.83" "v1.82" "v1.81" "v1.80" "v1.79" "v1.78" "v1.77" "v1.76" "v1.75" "v1.74" "v1.73" "v1.72" "v1.71" "v1.70" "v1.69" "v1.68" "v1.67" "v1.66" "v1.65" "v1.64" "v1.63" "v1.62" "v1.61" "v1.60" "v1.59" "v1.58" "v1.57" "v1.56" "v1.55" "v1.54" "v1.53" "v1.52" "v1.51" "v1.50" "v1.49" "v1.48" "v1.47" "v1.46" "v1.45" "v1.44" "v1.43" "v1.42" "v1.41" "v1.40" "v1.39" "v1.38" "v1.37"
do
git checkout release-$version
mdbook build && cp book/welcome_and_overview.html book/index.html
mkdir ver-temp && cp -r book/* ver-temp/
rm -r ./book
git checkout gh-pages
rm -r $version
mv ver-temp $version
git add ./$version
git commit -m "Version picker deployed for $version docs to Github Pages"
done
- name: Push to gh-pages
run: |
git checkout gh-pages
git status
git push

View File

@@ -1,34 +0,0 @@
name: Deploy documentation PR preview
on:
workflow_run:
workflows: [ "Prepare documentation PR preview" ]
types:
- completed
jobs:
netlify:
if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
runs-on: ubuntu-latest
steps:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}
name: book
path: book
- name: 📤 Deploy to Netlify
uses: matrix-org/netlify-pr-preview@v3
with:
path: book
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
branch: ${{ github.event.workflow_run.head_branch }}
revision: ${{ github.event.workflow_run.head_sha }}
token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
site_id: ${{ secrets.NETLIFY_SITE_ID }}
desc: Documentation preview
deployment_env: PR Documentation Preview

View File

@@ -1,71 +0,0 @@
name: Prepare documentation PR preview
on:
pull_request:
paths:
- docs/**
- book.toml
- .github/workflows/docs-pr.yaml
- scripts-dev/schema_versions.py
jobs:
pages:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
with:
mdbook-version: '0.4.17'
- name: Setup python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"
- name: Build the documentation
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
# However, we're using docs/README.md for other purposes and need to pick a new page
# as the default. Let's opt for the welcome page instead.
run: |
mdbook build
cp book/welcome_and_overview.html book/index.html
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: book
path: book
# We'll only use this in a workflow_run, then we're done with it
retention-days: 1
link-check:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
with:
mdbook-version: '0.4.17'
- name: Setup htmltest
run: |
wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
tar zxf htmltest_0.17.0_linux_amd64.tar.gz
- name: Test links with htmltest
# Build the book with `./` as the site URL (to make checks on 404.html possible)
# Then run htmltest (without checking external links since that involves the network and is slow).
run: |
MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
./htmltest book --skip-external
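The htmltest setup above pins a release tarball and verifies it with sha256sum -c. The same integrity check in Python, for illustration (hash and filename copied from the step above):

import hashlib

expected = "775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb"
with open("htmltest_0.17.0_linux_amd64.tar.gz", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == expected, "checksum mismatch"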

View File

@@ -13,10 +13,25 @@ on:
   workflow_dispatch:

 jobs:
-  pre:
-    name: Calculate variables for GitHub Pages deployment
+  pages:
+    name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup mdbook
+        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
+        with:
+          mdbook-version: '0.4.17'
+
+      - name: Build the documentation
+        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
+        # However, we're using docs/README.md for other purposes and need to pick a new page
+        # as the default. Let's opt for the welcome page instead.
+        run: |
+          mdbook build
+          cp book/welcome_and_overview.html book/index.html
+
       # Figure out the target directory.
       #
       # The target directory depends on the name of the branch

@@ -39,79 +54,12 @@
           esac

           # finally, set the 'branch-version' var.
-          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
-    outputs:
-      branch-version: ${{ steps.vars.outputs.branch-version }}
-
-  ################################################################################
-  pages-docs:
-    name: GitHub Pages
-    runs-on: ubuntu-latest
-    needs:
-      - pre
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          # Fetch all history so that the schema_versions script works.
-          fetch-depth: 0
-
-      - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
-        with:
-          mdbook-version: '0.4.17'
-
-      - name: Set version of docs
-        run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
-
-      - name: Setup python
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
-
-      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"
-
-      - name: Build the documentation
-        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
-        # However, we're using docs/README.md for other purposes and need to pick a new page
-        # as the default. Let's opt for the welcome page instead.
-        run: |
-          mdbook build
-          cp book/welcome_and_overview.html book/index.html
+          echo "::set-output name=branch-version::$branch"

       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
-          destination_dir: ./${{ needs.pre.outputs.branch-version }}
-
-  ################################################################################
-  pages-devdocs:
-    name: GitHub Pages (developer docs)
-    runs-on: ubuntu-latest
-    needs:
-      - pre
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: "Set up Sphinx"
-        uses: matrix-org/setup-python-poetry@v1
-        with:
-          python-version: "3.x"
-          poetry-version: "1.3.2"
-          groups: "dev-docs"
-          extras: ""
-
-      - name: Build the documentation
-        run: |
-          cd dev-docs
-          poetry run make html
-
-      # Deploy to the target directory.
-      - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: ./dev-docs/_build/html
-          destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }}
+          destination_dir: ./${{ steps.vars.outputs.branch-version }}

View File

@@ -1,52 +0,0 @@
# A helper workflow to automatically fixup any linting errors on a PR. Must be
# triggered manually.
name: Attempt to automatically fix linting errors
on:
workflow_dispatch:
jobs:
fixup:
name: Fix up
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
toolchain: nightly-2022-12-01
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@v1
with:
install-project: "false"
- name: Import order (isort)
continue-on-error: true
run: poetry run isort .
- name: Code style (black)
continue-on-error: true
run: poetry run black .
- name: Semantic checks (ruff)
continue-on-error: true
run: poetry run ruff --fix .
- run: cargo clippy --all-features --fix -- -D warnings
continue-on-error: true
- run: cargo fmt
continue-on-error: true
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Attempt to fix linting"

View File

@@ -22,26 +22,15 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  check_repo:
-    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
-    # only useful to the Synapse core team.
-    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
-    # of the workflow will be skipped as well.
-    runs-on: ubuntu-latest
-    outputs:
-      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
-    steps:
-      - id: check_condition
-        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
-
   mypy:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2

       # The dev dependencies aren't exposed in the wheel metadata (at least with current
@@ -49,7 +38,7 @@ jobs:
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "1.3.2"
+          poetry-version: "1.2.0"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
@@ -57,12 +46,10 @@ jobs:
       # `pip install matrix-synapse[all]` as closely as possible.
       - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
-      - name: Remove unhelpful options from mypy config
-        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy

   trial:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     strategy:
       matrix:
@@ -72,10 +59,13 @@ jobs:
             postgres-version: "14"

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2

       - run: sudo apt-get -qq install xmlsec1
@@ -86,7 +76,7 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v2
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -121,8 +111,6 @@ jobs:

   sytest:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
       image: matrixdotorg/sytest-synapse:testing
@@ -145,10 +133,13 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2

       - name: Ensure sytest runs `pip install`
@@ -174,8 +165,7 @@ jobs:

   complement:
-    needs: check_repo
-    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "${{ !failure() && !cancelled() }}"
     runs-on: ubuntu-latest

     strategy:
@@ -192,19 +182,14 @@ jobs:
           database: Postgres

     steps:
-      - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@v4
+      - name: Run actions/checkout@v3 for synapse
+        uses: actions/checkout@v3
         with:
           path: synapse

       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh

-      - uses: actions/setup-go@v5
-        with:
-          cache-dependency-path: complement/go.sum
-          go-version-file: complement/go.mod
-
       - run: |
           set -o pipefail
           TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
@@ -214,7 +199,7 @@ jobs:
   # Open an issue if the build fails, so we know about it.
   # Only do this if we're not experimenting with this action in a PR.
   open-issue:
-    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
     needs:
       # TODO: should mypy be included here? It feels more brittle than the others.
       - mypy
@@ -225,8 +210,8 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
-      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
+      - uses: actions/checkout@v3
+      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:

View File

@@ -1,24 +0,0 @@
on:
push:
branches: ["develop", "release-*"]
paths:
- poetry.lock
pull_request:
paths:
- poetry.lock
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
check-sdists:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: pip install tomli
- run: ./scripts-dev/check_locked_deps_have_sdists.py

View File

@@ -1,74 +0,0 @@
# This task does not run complement tests, see tests.yaml instead.
# This task does not build docker images for synapse for use on docker hub, see docker.yaml instead
name: Store complement-synapse image in ghcr.io
on:
push:
branches: [ "master" ]
schedule:
- cron: '0 5 * * *'
workflow_dispatch:
inputs:
branch:
required: true
default: 'develop'
type: choice
options:
- develop
- master
# Only run this action once per pull request/branch; restart if a new commit arrives.
# C.f. https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
# and https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
name: Build and push complement image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@v4
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@v4
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@v4
if: github.event_name == 'push'
with:
ref: master
- name: Login to registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Work out labels for complement image
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/${{ github.repository }}/complement-synapse
tags: |
type=schedule,pattern=nightly,enable=${{ github.event_name == 'schedule'}}
type=raw,value=develop,enable=${{ github.event_name == 'schedule' || inputs.branch == 'develop' }}
type=raw,value=latest,enable=${{ github.event_name == 'push' || inputs.branch == 'master' }}
type=sha,format=long
- name: Run scripts-dev/complement.sh to generate complement-synapse:latest image.
run: scripts-dev/complement.sh --build-only
- name: Tag and push generated image
run: |
for TAG in ${{ join(fromJson(steps.meta.outputs.json).tags, ' ') }}; do
echo "tag and push $TAG"
docker tag complement-synapse $TAG
docker push $TAG
done
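The tag-and-push loop above iterates over the tag list produced by docker/metadata-action. A Python sketch of the same loop, invoking the docker CLI via subprocess (the tag value is illustrative):

import subprocess

tags = ["ghcr.io/matrix-org/synapse/complement-synapse:develop"]  # illustrative
for tag in tags:
    print(f"tag and push {tag}")
    subprocess.run(["docker", "tag", "complement-synapse", tag], check=True)
    subprocess.run(["docker", "push", tag], check=True)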

View File

@@ -4,15 +4,13 @@ name: Build release artifacts

 on:
   # we build on PRs and develop to (hopefully) get early warning
-  # of things breaking (but only build one set of debs). PRs skip
-  # building wheels on macOS & ARM.
+  # of things breaking (but only build one set of debs)
   pull_request:
   push:
     branches: ["develop", "release-*"]

     # we do the full build on tags.
     tags: ["v*"]
-  merge_group:
   workflow_dispatch:

 concurrency:

@@ -27,19 +25,16 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
-          # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
           dists='["debian:sid"]'
           if [[ $GITHUB_REF == refs/tags/* ]]; then
             dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
           fi
-          echo "distros=$dists" >> "$GITHUB_OUTPUT"
+          echo "::set-output name=distros::$dists"
       # map the step outputs to job outputs
     outputs:
       distros: ${{ steps.set-distros.outputs.distros }}

@@ -55,13 +50,13 @@ jobs:
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           path: src

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
         with:
           install: true

@@ -74,9 +69,7 @@ jobs:
             ${{ runner.os }}-buildx-

       - name: Set up python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
+        uses: actions/setup-python@v2

       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252

@@ -98,63 +91,39 @@ jobs:
           path: debs/*

   build-wheels:
-    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
+    name: Build wheels on ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-20.04, macos-11]
-        arch: [x86_64, aarch64]
-        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
-        # It is not read by the rest of the workflow.
+        os: [ubuntu-20.04, macos-10.15]
         is_pr:
           - ${{ startsWith(github.ref, 'refs/pull/') }}

         exclude:
           # Don't build macos wheels on PR CI.
           - is_pr: true
-            os: "macos-11"
-          # Don't build aarch64 wheels on mac.
-          - os: "macos-11"
-            arch: aarch64
-          # Don't build aarch64 wheels on PR CI.
-          - is_pr: true
-            arch: aarch64
+            os: "macos-10.15"

     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
-          # here, because `python` on osx points to Python 2.7.
-          python-version: "3.x"
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3

       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==2.16.2
+        run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0

-      - name: Set up QEMU to emulate aarch64
-        if: matrix.arch == 'aarch64'
-        uses: docker/setup-qemu-action@v3
-        with:
-          platforms: arm64
-
-      - name: Build aarch64 wheels
-        if: matrix.arch == 'aarch64'
-        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
-
-      - name: Only build a single wheel on PR
-        if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+      # Only build a single wheel in CI.
+      - name: Set env vars.
+        run: |
+          echo "CIBW_BUILD="cp37-manylinux_x86_64"" >> $GITHUB_ENV
+        if: startsWith(github.ref, 'refs/pull/')

       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
-          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
-          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
-          CARGO_NET_GIT_FETCH_WITH_CLI: true
-          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
+          CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*

       - uses: actions/upload-artifact@v3
         with:

@@ -167,8 +136,8 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
           python-version: '3.10'

View File

@@ -4,7 +4,6 @@ on:
   push:
     branches: ["develop", "release-*"]
   pull_request:
-  merge_group:
   workflow_dispatch:

 concurrency:

@@ -12,19 +11,12 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  check-signoff:
-    if: "github.event_name == 'pull_request'"
-    uses: "matrix-org/backend-meta/.github/workflows/sign-off.yml@v2"
-
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
   changes:
     runs-on: ubuntu-latest
     outputs:
       rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
-      trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
-      integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
-      linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
     steps:
       - uses: dorny/paths-filter@v2
         id: filter

@@ -35,154 +27,35 @@ jobs:
           rust:
             - 'rust/**'
             - 'Cargo.toml'
-            - 'Cargo.lock'
-            - '.rustfmt.toml'
-            - '.github/workflows/tests.yml'
-
-          trial:
-            - 'synapse/**'
-            - 'tests/**'
-            - 'rust/**'
-            - '.ci/scripts/calculate_jobs.py'
-            - 'Cargo.toml'
-            - 'Cargo.lock'
-            - 'pyproject.toml'
-            - 'poetry.lock'
-            - '.github/workflows/tests.yml'
-
-          integration:
-            - 'synapse/**'
-            - 'rust/**'
-            - 'docker/**'
-            - 'Cargo.toml'
-            - 'Cargo.lock'
-            - 'pyproject.toml'
-            - 'poetry.lock'
-            - 'docker/**'
-            - '.ci/**'
-            - 'scripts-dev/complement.sh'
-            - '.github/workflows/tests.yml'
-
-          linting:
-            - 'synapse/**'
-            - 'docker/**'
-            - 'tests/**'
-            - 'scripts-dev/**'
-            - 'contrib/**'
-            - 'synmark/**'
-            - 'stubs/**'
-            - '.ci/**'
-            - 'mypy.ini'
-            - 'pyproject.toml'
-            - 'poetry.lock'
-            - '.github/workflows/tests.yml'

   check-sampleconfig:
     runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
     steps:
-      - uses: actions/checkout@v4
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
-      - uses: Swatinem/rust-cache@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: "3.x"
-          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/generate_sample_config.sh --check
       - run: poetry run scripts-dev/config-lint.sh

   check-schema-delta:
     runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
       - run: scripts-dev/check_schema_delta.py --force-colors

-  check-lockfile:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
-      - run: .ci/scripts/check_lockfile.py
-
   lint:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@v1
-        with:
-          install-project: "false"
-
-      - name: Import order (isort)
-        run: poetry run isort --check --diff .
-
-      - name: Code style (black)
-        run: poetry run black --check --diff .
-
-      - name: Semantic checks (ruff)
-        # --quiet suppresses the update check.
-        run: poetry run ruff --quiet .
-
-  lint-mypy:
-    runs-on: ubuntu-latest
-    name: Typechecking
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
-      - uses: Swatinem/rust-cache@v2
-
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@v1
-        with:
-          # We want to make use of type hints in optional dependencies too.
-          extras: all
-          # We have seen odd mypy failures that were resolved when we started
-          # installing the project again:
-          # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
-          # To make CI green, err towards caution and install the project.
-          install-project: "true"
-
-      # Cribbed from
-      # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
-      - name: Restore/persist mypy's cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            .mypy_cache
-          key: mypy-cache-${{ github.context.sha }}
-          restore-keys: mypy-cache-
-
-      - name: Run mypy
-        run: poetry run mypy
+    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    with:
+      typechecking-extras: "all"

   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh

@@ -190,13 +63,11 @@
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
+      - uses: actions/setup-python@v2
       - run: "pip install 'towncrier>=18.6.0rc1'"
       - run: scripts-dev/check-newsfragment.sh
         env:

@@ -204,19 +75,12 @@
   lint-pydantic:
     runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
-      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/check_pydantic_models.py

@@ -226,34 +90,17 @@
     if: ${{ needs.changes.outputs.rust == 'true' }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
+        uses: actions-rs/toolchain@v1
         with:
+          toolchain: 1.58.1
+          override: true
           components: clippy
       - uses: Swatinem/rust-cache@v2

-      - run: cargo clippy -- -D warnings
-
-  # We also lint against a nightly rustc so that we can lint the benchmark
-  # suite, which requires a nightly compiler.
-  lint-clippy-nightly:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust == 'true' }}
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: nightly-2022-12-01
-          components: clippy
-      - uses: Swatinem/rust-cache@v2
-
-      - run: cargo clippy --all-features -- -D warnings
+      - run: cargo clippy

   lint-rustfmt:
     runs-on: ubuntu-latest

@@ -261,14 +108,14 @@
     if: ${{ needs.changes.outputs.rust == 'true' }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@master
+        uses: actions-rs/toolchain@v1
         with:
-          # We use nightly so that it correctly groups together imports
-          toolchain: nightly-2022-12-01
+          toolchain: 1.58.1
+          override: true
           components: rustfmt
       - uses: Swatinem/rust-cache@v2

       - run: cargo fmt --check

@@ -278,13 +125,11 @@
     if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
     needs:
       - lint
-      - lint-mypy
       - lint-crlf
       - lint-newsfile
       - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
-      - check-lockfile
       - lint-clippy
       - lint-rustfmt
     runs-on: ubuntu-latest

@@ -296,10 +141,8 @@
     needs: linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
       - id: get-matrix
         run: .ci/scripts/calculate_jobs.py
     outputs:

@@ -307,47 +150,35 @@
       sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}

   trial:
-    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
-    needs:
-      - calculate-test-jobs
-      - changes
+    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: calculate-test-jobs
     runs-on: ubuntu-latest
     strategy:
       matrix:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
-        # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
-        # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
         run: |
           docker run -d -p 5432:5432 \
-            --tmpfs /var/lib/postgres:rw,size=6144m \
-            --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.job.postgres-version }}
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
-      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.job.python-version }}
-          poetry-version: "1.3.2"
           extras: ${{ matrix.job.extras }}
       - name: Await PostgreSQL
         if: ${{ matrix.job.postgres-version }}
         timeout-minutes: 2
         run: until pg_isready -h localhost; do sleep 1; done
-      - run: poetry run trial --jobs=6 tests
+      - run: poetry run trial --jobs=2 tests
         env:
           SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
-          SYNAPSE_POSTGRES_HOST: /var/run/postgresql
+          SYNAPSE_POSTGRES_HOST: localhost
           SYNAPSE_POSTGRES_USER: postgres
           SYNAPSE_POSTGRES_PASSWORD: postgres
       - name: Dump logs

@@ -365,48 +196,56 @@
   trial-olddeps:
     # Note: sqlite only; no postgres
-    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
-    needs:
-      - linting-done
-      - changes
+    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: linting-done
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.61.0
+        uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
# There aren't wheels for some of the older deps, so we need to install # There aren't wheels for some of the older deps, so we need to install
# their build dependencies # their build dependencies
- run: | - run: |
sudo apt-get -qq update
sudo apt-get -qq install build-essential libffi-dev python-dev \ sudo apt-get -qq install build-essential libffi-dev python-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@v5 - uses: actions/setup-python@v4
with: with:
python-version: '3.8' python-version: '3.7'
# Calculating the old-deps actually takes a bunch of time, so we cache the
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
# otherwise the `poetry install` step will error due to the poetry.lock
# file being outdated.
#
# This caches the output of `Prepare old deps`, which should generate the
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
- uses: actions/cache@v3
id: cache-poetry-old-deps
name: Cache poetry.lock
with:
path: |
poetry.lock
pyproject.toml
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
- name: Prepare old deps - name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true' if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
run: .ci/scripts/prepare_old_deps.sh run: .ci/scripts/prepare_old_deps.sh
# Note: we install using `pip` here, not poetry. `poetry install` ignores the # We only now install poetry so that `setup-python-poetry` caches the
# build-system section (https://github.com/python-poetry/poetry/issues/6154), but # right poetry.lock's dependencies.
# we explicitly want to test that you can `pip install` using the oldest version - uses: matrix-org/setup-python-poetry@v1
# of poetry-core and setuptools-rust. with:
- run: pip install .[all,test] python-version: '3.7'
extras: "all test"
# We nuke the local copy, as we've installed synapse into the virtualenv - run: poetry run trial -j2 tests
# (rather than use an editable install, which we no longer support). If we
# don't do this then python can't find the native lib.
- run: rm -rf synapse/
# Sanity check we can import/run Synapse
- run: python -m synapse.app.homeserver --help
- run: python -m twisted.trial -j6 tests
- name: Dump logs - name: Dump logs
# Logs are most useful when the command fails, always include them. # Logs are most useful when the command fails, always include them.
if: ${{ always() }} if: ${{ always() }}
@ -423,24 +262,21 @@ jobs:
trial-pypy: trial-pypy:
# Very slow; only run if the branch name includes 'pypy' # Very slow; only run if the branch name includes 'pypy'
# Note: sqlite only; no postgres. Completely untested since poetry move. # Note: sqlite only; no postgres. Completely untested since poetry move.
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }} if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: needs: linting-done
- linting-done
- changes
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
python-version: ["pypy-3.8"] python-version: ["pypy-3.7"]
extras: ["all"] extras: ["all"]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# Install libs necessary for PyPy to build binary wheels for dependencies # Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@v1 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
poetry-version: "1.3.2"
extras: ${{ matrix.extras }} extras: ${{ matrix.extras }}
- run: poetry run trial --jobs=2 tests - run: poetry run trial --jobs=2 tests
- name: Dump logs - name: Dump logs
@ -457,10 +293,8 @@ jobs:
|| true || true
sytest: sytest:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} if: ${{ !failure() && !cancelled() }}
needs: needs: calculate-test-jobs
- calculate-test-jobs
- changes
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }} image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
@ -469,8 +303,7 @@ jobs:
env: env:
SYTEST_BRANCH: ${{ github.head_ref }} SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.job.postgres && 1}} POSTGRES: ${{ matrix.job.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }} MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
WORKERS: ${{ matrix.job.workers && 1 }} WORKERS: ${{ matrix.job.workers && 1 }}
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }} BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
TOP: ${{ github.workspace }} TOP: ${{ github.workspace }}
@ -481,12 +314,15 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Prepare test blacklist - name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0 uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
- name: Run SyTest - name: Run SyTest
@ -505,8 +341,8 @@ jobs:
/logs/**/*.log* /logs/**/*.log*
export-data: export-data:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb, changes] needs: [linting-done, portdb]
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
TOP: ${{ github.workspace }} TOP: ${{ github.workspace }}
@ -526,11 +362,10 @@ jobs:
--health-retries 5 --health-retries 5
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- run: sudo apt-get -qq install xmlsec1 postgresql-client - run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@v1 - uses: matrix-org/setup-python-poetry@v1
with: with:
poetry-version: "1.3.2"
extras: "postgres" extras: "postgres"
- run: .ci/scripts/test_export_data_command.sh - run: .ci/scripts/test_export_data_command.sh
env: env:
@ -541,19 +376,17 @@ jobs:
portdb: portdb:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: needs: linting-done
- linting-done
- changes
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
include: include:
- python-version: "3.8" - python-version: "3.7"
postgres-version: "11" postgres-version: "10"
- python-version: "3.11" - python-version: "3.10"
postgres-version: "15" postgres-version: "14"
services: services:
postgres: postgres:
@ -570,21 +403,11 @@ jobs:
--health-retries 5 --health-retries 5
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
# behind new releases, so we have to use the PostreSQL apt repository.
# Steps taken from https://www.postgresql.org/download/linux/ubuntu/
run: |
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt-get update
- run: sudo apt-get -qq install xmlsec1 postgresql-client - run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@v1 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
poetry-version: "1.3.2"
extras: "postgres" extras: "postgres"
- run: .ci/scripts/test_synapse_port_db.sh - run: .ci/scripts/test_synapse_port_db.sh
id: run_tester_script id: run_tester_script
@ -604,10 +427,8 @@ jobs:
schema_diff schema_diff
complement: complement:
if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}" if: "${{ !failure() && !cancelled() }}"
needs: needs: linting-done
- linting-done
- changes
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
@ -624,31 +445,25 @@ jobs:
database: Postgres database: Postgres
steps: steps:
- name: Run actions/checkout@v4 for synapse - name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v4 uses: actions/checkout@v3
with: with:
path: synapse path: synapse
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0 uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
- name: Prepare Complement's Prerequisites - name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@v5
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
# use p=1 concurrency as GHA boxes are underpowered and don't like running tons of synapses at once.
- run: | - run: |
set -o pipefail set -o pipefail
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -p 1 -json 2>&1 | synapse/.ci/scripts/gotestfmt POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
shell: bash shell: bash
env:
POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
WORKERS: ${{ (matrix.arrangement == 'workers') && 1 || '' }}
name: Run Complement Tests name: Run Complement Tests
cargo-test: cargo-test:
@ -659,34 +474,17 @@ jobs:
- changes - changes
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0 uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
- run: cargo test - run: cargo test
# We want to ensure that the cargo benchmarks still compile, which requires a
# nightly compiler.
cargo-bench:
if: ${{ needs.changes.outputs.rust == 'true' }}
runs-on: ubuntu-latest
needs:
- linting-done
- changes
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@v2
- run: cargo bench --no-run
# a job which marks all the other jobs as complete, thus allowing PRs to be merged. # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
tests-done: tests-done:
if: ${{ always() }} if: ${{ always() }}
@ -698,23 +496,14 @@ jobs:
- portdb - portdb
- complement - complement
- cargo-test - cargo-test
- cargo-bench
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: matrix-org/done-action@v2 - uses: matrix-org/done-action@v2
with: with:
needs: ${{ toJSON(needs) }} needs: ${{ toJSON(needs) }}
# Various bits are skipped if there was no applicable changes. # The newsfile lint may be skipped on non PR builds
# The newsfile and signoff lint may be skipped on non PR builds. # Cargo test is skipped if there is no changes on Rust code
skippable: | skippable: |
trial
trial-olddeps
sytest
portdb
export-data
complement
check-signoff
lint-newsfile lint-newsfile
cargo-test cargo-test
cargo-bench

@@ -5,11 +5,24 @@ on:
     types: [ opened ]
 
 jobs:
-  triage:
-    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
-    with:
-      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
-      content_id: ${{ github.event.issue.node_id }}
-    secrets:
-      github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
+  add_new_issues:
+    name: Add new issues to the triage board
+    runs-on: ubuntu-latest
+    steps:
+      - uses: octokit/graphql-action@v2.x
+        id: add_to_project
+        with:
+          headers: '{"GraphQL-Features": "projects_next_graphql"}'
+          query: |
+            mutation add_to_project($projectid:ID!,$contentid:ID!) {
+              addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
+                item {
+                  id
+                }
+              }
+            }
+          projectid: ${{ env.PROJECT_ID }}
+          contentid: ${{ github.event.issue.node_id }}
+        env:
+          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
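Both sides of the job above drive the same ProjectV2 GraphQL API. As a rough sketch (not part of either workflow), the `addProjectV2ItemById` mutation can be exercised by hand with the GitHub CLI, assuming `gh` is authenticated with a token allowed to write to the board; the project ID is the one hard-coded in the workflow, and the issue number used to fetch a node ID is a placeholder:

```sh
# Hypothetical smoke test for the triage-board mutation used above.
# Fetch the node ID of some test issue (issue number 1 is a placeholder):
ISSUE_NODE_ID=$(gh api repos/matrix-org/synapse/issues/1 --jq .node_id)

# Add it to the board with the same addProjectV2ItemById mutation:
gh api graphql \
  -f projectid="PVT_kwDOAIB0Bs4AFDdZ" \
  -f contentid="$ISSUE_NODE_ID" \
  -f query='
    mutation($projectid: ID!, $contentid: ID!) {
      addProjectV2ItemById(input: {projectId: $projectid, contentId: $contentid}) {
        item { id }
      }
    }'
```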

@@ -11,34 +11,34 @@ jobs:
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
     steps:
-      - uses: actions/add-to-project@main
-        id: add_project
-        with:
-          project-url: "https://github.com/orgs/matrix-org/projects/67"
-          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-      - name: Set status
-        env:
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        run: |
-          gh api graphql -f query='
-            mutation(
-              $project: ID!
-              $item: ID!
-              $fieldid: ID!
-              $columnid: String!
-            ) {
-              updateProjectV2ItemFieldValue(
-                input: {
-                  projectId: $project
-                  itemId: $item
-                  fieldId: $fieldid
-                  value: {
-                    singleSelectOptionId: $columnid
-                  }
-                }
-              ) {
-                projectV2Item {
-                  id
-                }
-              }
-            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
+      - uses: octokit/graphql-action@v2.x
+        id: add_to_project
+        with:
+          headers: '{"GraphQL-Features": "projects_next_graphql"}'
+          query: |
+            mutation {
+              updateProjectV2ItemFieldValue(
+                input: {
+                  projectId: $projectid
+                  itemId: $contentid
+                  fieldId: $fieldid
+                  value: {
+                    singleSelectOptionId: "Todo"
+                  }
+                }
+              ) {
+                projectV2Item {
+                  id
+                }
+              }
+            }
+          projectid: ${{ env.PROJECT_ID }}
+          contentid: ${{ github.event.issue.node_id }}
+          fieldid: ${{ env.FIELD_ID }}
+          optionid: ${{ env.OPTION_ID }}
+        env:
+          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+          FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4"
+          OPTION_ID: "ba22e43c"

@@ -5,45 +5,23 @@ on:
     - cron: 0 8 * * *
 
   workflow_dispatch:
-    # NB: inputs are only present when this workflow is dispatched manually.
-    # (The default below is the default field value in the form to trigger
-    # a manual dispatch). Otherwise the inputs will evaluate to null.
-    inputs:
-      twisted_ref:
-        description: Commit, branch or tag to checkout from upstream Twisted.
-        required: false
-        default: 'trunk'
-        type: string
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
 jobs:
-  check_repo:
-    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
-    # only useful to the Synapse core team.
-    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
-    # of the workflow will be skipped as well.
-    if: github.repository == 'matrix-org/synapse'
-    runs-on: ubuntu-latest
-    outputs:
-      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
-    steps:
-      - id: check_condition
-        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
-
   mypy:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
@@ -52,23 +30,24 @@ jobs:
           extras: "all"
       - run: |
           poetry remove twisted
-          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
           poetry install --no-interaction --extras "all test"
-      - name: Remove unhelpful options from mypy config
-        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy
 
   trial:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - run: sudo apt-get -qq install xmlsec1
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
@@ -95,23 +74,20 @@ jobs:
           || true
 
   sytest:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
-      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
-      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
-      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:focal
+      image: matrixdotorg/sytest-synapse:buster
       volumes:
         - ${{ github.workspace }}:/src
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@stable
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v2
       - name: Patch dependencies
@@ -145,8 +121,7 @@ jobs:
             /logs/**/*.log*
 
   complement:
-    needs: check_repo
-    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "${{ !failure() && !cancelled() }}"
     runs-on: ubuntu-latest
     strategy:
@@ -163,29 +138,25 @@ jobs:
           database: Postgres
     steps:
-      - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@v4
+      - name: Run actions/checkout@v3 for synapse
+        uses: actions/checkout@v3
         with:
           path: synapse
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
-      - uses: actions/setup-go@v5
-        with:
-          cache-dependency-path: complement/go.sum
-          go-version-file: complement/go.mod
       # This step is specific to the 'Twisted trunk' test run:
       - name: Patch dependencies
         run: |
           set -x
           DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
-          pipx install poetry==1.3.2
+          pipx install poetry==1.1.14
           poetry remove -n twisted
           poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
           poetry lock --no-update
+          # NOT IN 1.1.14 poetry lock --check
         working-directory: synapse
       - run: |
@@ -196,7 +167,7 @@ jobs:
   # open an issue if the build fails, so we know about it.
   open-issue:
-    if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
+    if: failure()
     needs:
       - mypy
       - trial
@@ -206,8 +177,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
-      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
+      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:

.gitignore (vendored, 14 lines changed)

@@ -15,10 +15,9 @@ _trial_temp*/
 .DS_Store
 __pycache__/
 
-# We do want poetry, cargo and flake lockfiles.
+# We do want the poetry and cargo lockfile.
 !poetry.lock
 !Cargo.lock
-!flake.lock
 
 # stuff that is likely to exist when you run a server locally
 /*.db
@@ -34,14 +33,9 @@ __pycache__/
 /logs
 /media_store/
 /uploads
-/homeserver-config-overrides.d
 
 # For direnv users
 /.envrc
-.direnv/
-
-# For nix/devenv users
-.devenv/
 
 # IDEs
 /.idea/
@@ -58,7 +52,6 @@ __pycache__/
 /coverage.*
 /dist/
 /docs/build/
-/dev-docs/_build/
 /htmlcov
 /pip-wheel-metadata/
@@ -67,7 +60,7 @@ book/
 # complement
 /complement-*
-/main.tar.gz
+/master.tar.gz
 
 # rust
 /target/
@@ -75,6 +68,3 @@ book/
 # Poetry will create a setup.py, which we don't want to include.
 /setup.py
-
-# Don't include users' poetry configs
-/poetry.toml

CHANGES.md (3874 lines changed): file diff suppressed because it is too large.

Cargo.lock (generated, 138 lines changed)

@@ -4,18 +4,18 @@ version = 3
 [[package]]
 name = "aho-corasick"
-version = "1.0.2"
+version = "0.7.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
+checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "anyhow"
-version = "1.0.75"
+version = "1.0.65"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
 
 [[package]]
 name = "arc-swap"
@@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 [[package]]
 name = "blake2"
-version = "0.10.6"
+version = "0.10.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
+checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388"
 dependencies = [
  "digest",
 ]
@@ -90,12 +90,6 @@ dependencies = [
  "version_check",
 ]
 
-[[package]]
-name = "heck"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
-
 [[package]]
 name = "hex"
 version = "0.4.3"
@@ -104,15 +98,15 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 [[package]]
 name = "indoc"
-version = "2.0.4"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
 
 [[package]]
 name = "itoa"
-version = "1.0.4"
+version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
+checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
 
 [[package]]
 name = "lazy_static"
@@ -122,15 +116,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 [[package]]
 name = "libc"
-version = "0.2.135"
+version = "0.2.132"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
 
 [[package]]
 name = "lock_api"
-version = "0.4.9"
+version = "0.4.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
 dependencies = [
  "autocfg",
  "scopeguard",
@@ -138,30 +132,33 @@ dependencies = [
 [[package]]
 name = "log"
-version = "0.4.20"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
 
 [[package]]
 name = "memchr"
-version = "2.6.3"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 
 [[package]]
 name = "memoffset"
-version = "0.9.0"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
 dependencies = [
  "autocfg",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.15.0"
+version = "1.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
 
 [[package]]
 name = "parking_lot"
@@ -188,18 +185,18 @@ dependencies = [
 [[package]]
 name = "proc-macro2"
-version = "1.0.64"
+version = "1.0.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da"
+checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "pyo3"
-version = "0.20.0"
+version = "0.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04e8453b658fe480c3e70c8ed4e3d3ec33eb74988bd186561b0cc66b85c3bc4b"
+checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -215,9 +212,9 @@ dependencies = [
 [[package]]
 name = "pyo3-build-config"
-version = "0.20.0"
+version = "0.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a96fe70b176a89cff78f2fa7b3c930081e163d5379b4dcdf993e3ae29ca662e5"
+checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479"
 dependencies = [
  "once_cell",
  "target-lexicon",
@@ -225,9 +222,9 @@ dependencies = [
 [[package]]
 name = "pyo3-ffi"
-version = "0.20.0"
+version = "0.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "214929900fd25e6604661ed9cf349727c8920d47deff196c4e28165a6ef2a96b"
+checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -235,9 +232,9 @@ dependencies = [
 [[package]]
 name = "pyo3-log"
-version = "0.9.0"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd"
+checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
 dependencies = [
  "arc-swap",
  "log",
@@ -246,9 +243,9 @@ dependencies = [
 [[package]]
 name = "pyo3-macros"
-version = "0.20.0"
+version = "0.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dac53072f717aa1bfa4db832b39de8c875b7c7af4f4a6fe93cdbf9264cf8383b"
+checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -258,11 +255,10 @@ dependencies = [
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.20.0"
+version = "0.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7774b5a8282bd4f25f803b1f0d945120be959a36c72e08e7cd031c792fdfd424"
+checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784"
 dependencies = [
- "heck",
  "proc-macro2",
  "quote",
  "syn",
@@ -270,9 +266,9 @@ dependencies = [
 [[package]]
 name = "pythonize"
-version = "0.20.0"
+version = "0.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffd1c3ef39c725d63db5f9bc455461bafd80540cb7824c61afb823501921a850"
+checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
 dependencies = [
  "pyo3",
  "serde",
@@ -280,9 +276,9 @@ dependencies = [
 [[package]]
 name = "quote"
-version = "1.0.29"
+version = "1.0.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
 dependencies = [
  "proc-macro2",
 ]
@@ -298,21 +294,9 @@ dependencies = [
 [[package]]
 name = "regex"
-version = "1.9.6"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
+checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-automata",
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -321,9 +305,9 @@ dependencies = [
 [[package]]
 name = "regex-syntax"
-version = "0.7.5"
+version = "0.6.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
+checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
 
 [[package]]
 name = "ryu"
@@ -339,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 [[package]]
 name = "serde"
-version = "1.0.193"
+version = "1.0.145"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
+checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.193"
+version = "1.0.145"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
+checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -359,9 +343,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.108"
+version = "1.0.85"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
+checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
 dependencies = [
  "itoa",
  "ryu",
@@ -370,9 +354,9 @@ dependencies = [
 [[package]]
 name = "smallvec"
-version = "1.10.0"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
 
 [[package]]
 name = "subtle"
@@ -382,9 +366,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
 [[package]]
 name = "syn"
-version = "2.0.28"
+version = "1.0.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
+checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -422,15 +406,15 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
 [[package]]
 name = "unicode-ident"
-version = "1.0.5"
+version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
+checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
 
 [[package]]
 name = "unindent"
-version = "0.2.3"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
+checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"
 
 [[package]]
 name = "version_check"

@@ -3,4 +3,3 @@
 [workspace]
 members = ["rust"]
-resolver = "2"

@@ -2,19 +2,10 @@
 Synapse |support| |development| |documentation| |license| |pypi| |python|
 =========================================================================
 
-Synapse is now actively maintained at `element-hq/synapse <https://github.com/element-hq/synapse>`_
-=================================================================================================
-
-Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver developed
-from 2019 through 2023 as part of the Matrix.org Foundation. The Matrix.org
-Foundation is not able to resource maintenance of Synapse and it
-`continues to be developed by Element <https://github.com/element-hq/synapse>`_;
-additionally you have the choice of `other Matrix homeservers <https://matrix.org/ecosystem/servers/>`_.
-
-See `The future of Synapse and Dendrite <https://matrix.org/blog/2023/11/06/future-of-synapse-dendrite/>`_
-blog post for more information.
-
-=========================================================================
+Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
+maintained by the Matrix.org Foundation. We began rapid development in 2014,
+reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
+in earnest today.
 
 Briefly, Matrix is an open standard for communications on the internet, supporting
 federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
@@ -131,7 +122,7 @@ You will need to change the server you are logging into from ``matrix.org``
 and instead specify a Homeserver URL of ``https://<server_name>:8448``
 (or just ``https://<server_name>`` if you are using a reverse proxy).
 If you prefer to use another client, refer to our
-`client breakdown <https://matrix.org/ecosystem/clients/>`_.
+`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
 
 If all goes well you should at least be able to log in, create a room, and
 start sending messages.

@@ -34,14 +34,6 @@ additional-css = [
     "docs/website_files/table-of-contents.css",
     "docs/website_files/remove-nav-buttons.css",
     "docs/website_files/indent-section-headers.css",
-    "docs/website_files/version-picker.css",
 ]
-additional-js = [
-    "docs/website_files/table-of-contents.js",
-    "docs/website_files/version-picker.js",
-    "docs/website_files/version.js",
-]
-theme = "docs/website_files/theme"
-
-[preprocessor.schema_versions]
-command = "./scripts-dev/schema_versions.py"
+additional-js = ["docs/website_files/table-of-contents.js"]
+theme = "docs/website_files/theme"

@@ -1 +0,0 @@
-Added version picker for Synapse documentation. Contributed by @Dmytro27Ind.

@@ -1 +0,0 @@
-Add config options to set the avatar and the topic of the server notices room.

@@ -1 +0,0 @@
-Add a setting to be able to tweak the delay without interaction before an email is sent following a notification.

@@ -1 +0,0 @@
-Update the implementation of [MSC2965](https://github.com/matrix-org/matrix-spec-proposals/pull/2965) (OIDC Provider discovery).

@@ -1 +0,0 @@
-Clarify that `password_config.enabled: "only_for_reauth"` does not allow new logins to be created using password auth.

@@ -1 +0,0 @@
-Add new Sentry configuration option `environment` for improved system monitoring. Contributed by @zeeshanrafiqrana.

@@ -1 +0,0 @@
-Fix a long-standing bug where the signing keys generated by Synapse were world-readable. Contributed by Fabian Klemp.

@@ -769,7 +769,7 @@ def main(server_url, identity_server_url, username, token, config_path):
     global CONFIG_JSON
     CONFIG_JSON = config_path  # bit cheeky, but just overwrite the global
     try:
-        with open(config_path) as config:
+        with open(config_path, "r") as config:
             syn_cmd.config = json.load(config)
             try:
                 http_client.verbose = "on" == syn_cmd.config["verbose"]

@@ -37,6 +37,7 @@ class HttpClient:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
             will be the decoded JSON body.
         """
+        pass
 
     def get_json(self, url, args=None):
         """Gets some json from the given host homeserver and path
@@ -52,6 +53,7 @@ class HttpClient:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
             will be the decoded JSON body.
         """
+        pass
 
 class TwistedHttpClient(HttpClient):

@@ -1,28 +0,0 @@
-# Schema symlinks
-
-This directory contains symlinks to the latest dump of the postgres full schema. This is useful to have, as it allows IDEs to understand our schema and provide autocomplete, linters, inspections, etc.
-
-In particular, the DataGrip functionality in IntelliJ's products seems to only consider files called `*.sql` when defining a schema from DDL; `*.sql.postgres` will be ignored. To get around this we symlink those files to ones ending in `.sql`. We've chosen to ignore the `.sql.sqlite` schema dumps here, as they're not intended for production use (and are much quicker to test against).
-
-## Example
-![](datagrip-aware-of-schema.png)
-
-## Caveats
-
-- Doesn't include temporary tables created ad-hoc by Synapse.
-- Postgres only. IDEs will likely be confused by SQLite-specific queries.
-- Will not include migrations created after the latest schema dump.
-- Symlinks might confuse checkouts on Windows systems.
-
-## Instructions
-
-### Jetbrains IDEs with DataGrip plugin
-
-- View -> Tool Windows -> Database
-- `+` Icon -> DDL Data Source
-- Pick a name, e.g. `Synapse schema dump`
-- Under sources, click `+`.
-- Add an entry with Path pointing to this directory, and dialect set to PostgreSQL.
-- OK, and OK.
-- IDE should now be aware of the schema.
-- Try control-clicking on a table name in a bit of SQL e.g. in `_get_forgotten_rooms_for_user_txn`.
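The instructions above assume the `*.sql` symlinks already exist; the entries deleted below show their targets. A minimal sketch of recreating one by hand — note the `main.sql` filename is an assumption, since only the targets survive in this diff:

```sh
# Hypothetical: recreate a schema symlink so DataGrip sees a plain *.sql file.
# Run from the directory that contained this README; the target path is copied
# verbatim from the deleted symlink entries below.
ln -s ../../synapse/storage/schema/main/full_schemas/72/full.sql.postgres main.sql
```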

@@ -1 +0,0 @@
-../../synapse/storage/schema/common/full_schemas/72/full.sql.postgres

(Removed binary file, 13 KiB image; not shown.)

@@ -1 +0,0 @@
-../../synapse/storage/schema/main/full_schemas/72/full.sql.postgres

@@ -1 +0,0 @@
-../../synapse/storage/schema/common/schema_version.sql

@@ -1 +0,0 @@
-../../synapse/storage/schema/state/full_schemas/72/full.sql.postgres

@@ -68,12 +68,7 @@ redis:
   enabled: true
   host: redis
   port: 6379
-  # dbid: <redis_logical_db_id>
   # password: <secret_password>
-  # use_tls: True
-  # certificate_file: <path_to_certificate>
-  # private_key_file: <path_to_private_key>
-  # ca_file: <path_to_ca_certificate>
 ```
 
 This assumes that your Redis service is called `redis` in your Docker Compose file.
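Before digging into replication problems it can help to confirm Redis itself is up. A hedged one-liner, assuming the Compose service really is named `redis` as in the example above:

```sh
# Run redis-cli inside the Redis container; a "PONG" reply confirms the
# service that Synapse's replication relies on is up and answering.
docker compose exec redis redis-cli ping
```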
@@ -99,6 +94,20 @@ worker_replication_host: synapse
 worker_replication_http_port: 9093
 ```
 
+### Add Workers to `instance_map`
+
+Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:
+
+```yaml
+instance_map:
+  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
+    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
+    port: 8034                     # The port assigned to the replication listener in your worker config file
+  synapse-federation-sender-1:
+    host: synapse-federation-sender-1
+    port: 8034
+```
+
 ### Configure Federation Senders
 
 This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
@@ -113,4 +122,4 @@ federation_sender_instances:
 ## Other Worker types
 
 Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.

@@ -5,4 +5,10 @@ worker_name: synapse-federation-sender-1
 worker_replication_host: synapse
 worker_replication_http_port: 9093
 
+worker_listeners:
+  - type: http
+    port: 8034
+    resources:
+      - names: [replication]
+
 worker_log_config: /data/federation_sender.log.config

@@ -6,6 +6,10 @@ worker_replication_host: synapse
 worker_replication_http_port: 9093
 
 worker_listeners:
+  - type: http
+    port: 8034
+    resources:
+      - names: [replication]
   - type: http
     port: 8081
     x_forwarded: true
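A rough way to confirm a replication listener like the ones added above took effect is to poke it from another container on the Compose network. This is a hypothetical smoke test assuming the service names and port 8034 from these examples: any HTTP status (even a 404 from the bare prefix) shows the listener is accepting connections, while "connection refused" means it is not.

```sh
# Hypothetical check from the main Synapse container (the image ships curl
# for its healthcheck); prints the HTTP status code returned by the worker.
docker compose exec synapse curl -sS -o /dev/null -w '%{http_code}\n' \
  http://synapse-generic-worker-1:8034/_synapse/replication
```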

(File diff suppressed because it is too large.)

@@ -1,47 +0,0 @@
-# `lnav` config for Synapse logs
-
-[lnav](https://lnav.org/) is a log-viewing tool. It is particularly useful when
-you need to interleave multiple log files, or for exploring a large log file
-with regex filters. The downside is that it is not as ubiquitous as tools like
-`less`, `grep`, etc.
-
-This directory contains an `lnav` [log format definition](
-    https://docs.lnav.org/en/v0.10.1/formats.html#defining-a-new-format
-) for Synapse logs as
-emitted by Synapse with the default [logging configuration](
-    https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#log_config
-). It supports lnav 0.10.1 because that's what's packaged by my distribution.
-
-This should allow lnav:
-
-- to interpret timestamps, allowing log interleaving;
-- to interpret log severity levels, allowing colouring by log level(!!!);
-- to interpret request IDs, allowing you to skip through a specific request; and
-- to highlight room, event and user IDs in logs.
-
-See also https://gist.github.com/benje/e2ab750b0a81d11920d83af637d289f7 for a
-similar example.
-
-## Example
-
-[![asciicast](https://asciinema.org/a/556133.svg)](https://asciinema.org/a/556133)
-
-## Tips
-
-- `lnav -i /path/to/synapse/checkout/contrib/lnav/synapse-log-format.json`
-- `lnav my_synapse_log_file` or `lnav synapse_log_files.*`, etc.
-- `lnav --help` for CLI help.
-
-Within lnav itself:
-
-- `?` for help within lnav itself.
-- `q` to quit.
-- `/` to search a-la `less` and `vim`, then `n` and `N` to continue searching
-  down and up.
-- Use `o` and `O` to skip through logs based on the request ID (`POST-1234`, or
-  else the value of the [`request_id_header`](
-    https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html?highlight=request_id_header#listeners
-  ) header). This may get confused if the same request ID is repeated among
-  multiple files or process restarts.
-- ???
-- Profit

@@ -1,67 +0,0 @@
-{
-    "$schema": "https://lnav.org/schemas/format-v1.schema.json",
-    "synapse": {
-        "title": "Synapse logs",
-        "description": "Logs output by Synapse, a Matrix homeserver, under its default logging config.",
-        "regex": {
-            "log": {
-                "pattern": ".*(?<timestamp>\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}) - (?<logger>.+) - (?<lineno>\\d+) - (?<level>\\w+) - (?<context>.+) - (?<body>.*)"
-            }
-        },
-        "json": false,
-        "timestamp-field": "timestamp",
-        "timestamp-format": [
-            "%Y-%m-%d %H:%M:%S,%L"
-        ],
-        "level-field": "level",
-        "body-field": "body",
-        "opid-field": "context",
-        "level": {
-            "critical": "CRITICAL",
-            "error": "ERROR",
-            "warning": "WARNING",
-            "info": "INFO",
-            "debug": "DEBUG"
-        },
-        "sample": [
-            {
-                "line": "my-matrix-server-generic-worker-4 | 2023-01-27 09:47:09,818 - synapse.replication.tcp.client - 381 - ERROR - PUT-32992 - Timed out waiting for stream receipts",
-                "level": "error"
-            },
-            {
-                "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
-                "level": "warning"
-            },
-            {
-                "line": "my-matrix-server | 2023-01-25 20:55:54,433 - synapse.storage.databases - 66 - INFO - main - [database config 'master']: Checking database server",
-                "level": "info"
-            },
-            {
-                "line": "my-matrix-server | 2023-01-26 15:08:40,447 - synapse.access.http.8008 - 460 - INFO - PUT-74929 - 0.0.0.0 - 8008 - {@alice:example.com} Processed request: 0.011sec/0.000sec (0.000sec, 0.000sec) (0.001sec/0.008sec/3) 2B 200 \"PUT /_matrix/client/r0/user/%40alice%3Atexample.com/account_data/im.vector.setting.breadcrumbs HTTP/1.0\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Element/1.11.20 Chrome/108.0.5359.179 Electron/22.0.3 Safari/537.36\" [0 dbevts]",
-                "level": "info"
-            }
-        ],
-        "highlights": {
-            "user_id": {
-                "pattern": "(@|%40)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
-                "underline": true
-            },
-            "room_id": {
-                "pattern": "(!|%21)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
-                "underline": true
-            },
-            "room_alias": {
-                "pattern": "(#|%23)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
-                "underline": true
-            },
-            "event_id_v1_v2": {
-                "pattern": "(\\$|%25)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
-                "underline": true
-            },
-            "event_id_v3_plus": {
-                "pattern": "(\\$|%25)([A-Za-z0-9+/_]|-){43}",
-                "underline": true
-            }
-        }
-    }
-}

@@ -15,19 +15,19 @@ worker_name: generic_worker$i
 worker_replication_host: 127.0.0.1
 worker_replication_http_port: 9093
 
+worker_main_http_uri: http://localhost:8008/
+
 worker_listeners:
   - type: http
     port: 808$i
-    x_forwarded: true
     resources:
       - names: [client, federation]
 
 worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
-#worker_pid_file: DATADIR/generic_worker$i.pid
 EOF
 done
 ```
 
 This would create five generic workers with a unique `worker_name` field in each file and listening on ports 8081-8085.
 
-Customise the script to your needs. Note that `worker_pid_file` is required if `worker_daemonize` is `true`. Uncomment and/or modify the line if needed.
+Customise the script to your needs.

View File

@ -8,9 +8,7 @@ It also prints out the example lines for Synapse main configuration file.
Remember to route necessary endpoints directly to a worker associated with it. Remember to route necessary endpoints directly to a worker associated with it.
If you run the script as-is, it will create workers with the replication listener starting from port 8034 and another, regular http listener starting from 8044. If you don't need all of the stream writers listed in the script, just remove them from the ```STREAM_WRITERS``` array. If you run the script as-is, it will create workers with the replication listener starting from port 8034 and another, regular http listener starting from 8044. If you don't need all of the stream writers listed in the script, just remove them from the ```STREAM_WRITERS``` array.
Hint: Note that `worker_pid_file` is required if `worker_daemonize` is `true`. Uncomment and/or modify the line if needed.
```sh ```sh
#!/bin/bash #!/bin/bash
@ -48,11 +46,9 @@ worker_listeners:
- type: http - type: http
port: $(expr $HTTP_START_PORT + $i) port: $(expr $HTTP_START_PORT + $i)
x_forwarded: true
resources: resources:
- names: [client] - names: [client]
#worker_pid_file: DATADIR/${STREAM_WRITERS[$i]}.pid
worker_log_config: /etc/matrix-synapse/stream-writer-log.yaml worker_log_config: /etc/matrix-synapse/stream-writer-log.yaml
EOF EOF
HOMESERVER_YAML_INSTANCE_MAP+=$" ${STREAM_WRITERS[$i]}_stream_writer: HOMESERVER_YAML_INSTANCE_MAP+=$" ${STREAM_WRITERS[$i]}_stream_writer:
@ -95,9 +91,7 @@ Simply run the script to create YAML files in the current folder and print out t
```console ```console
$ ./create_stream_writers.sh $ ./create_stream_writers.sh
```
You should receive an output similar to the following:
```console
# Add these lines to your homeserver.yaml. # Add these lines to your homeserver.yaml.
# Don't forget to configure your reverse proxy and # Don't forget to configure your reverse proxy and
# necessary endpoints to their respective worker. # necessary endpoints to their respective worker.
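The `instance_map` entries the script accumulates (and the matching `stream_writers` block you would pair them with) might look like the following sketch; worker names and ports are illustrative, since the real values come from the `STREAM_WRITERS` array and the replication ports counting up from 8034:

```console
instance_map:
  events_stream_writer:
    host: 127.0.0.1
    port: 8034
  typing_stream_writer:
    host: 127.0.0.1
    port: 8035
stream_writers:
  events: events_stream_writer
  typing: typing_stream_writer
```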


@ -31,11 +31,12 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
esac esac
# Manually install Poetry and export a pip-compatible `requirements.txt` # Manually install Poetry and export a pip-compatible `requirements.txt`
# We need a Poetry pre-release as the export command is buggy in < 1.2
TEMP_VENV="$(mktemp -d)" TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV" python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate" source "$TEMP_VENV/bin/activate"
pip install -U pip pip install -U pip
pip install poetry==1.3.2 pip install poetry==1.2.0
poetry export \ poetry export \
--extras all \ --extras all \
--extras test \ --extras test \

debian/changelog vendored

@ -1,511 +1,3 @@
matrix-synapse-py3 (1.98.0) stable; urgency=medium
* New Synapse release 1.98.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Dec 2023 15:04:31 +0000
matrix-synapse-py3 (1.98.0~rc1) stable; urgency=medium
* New Synapse release 1.98.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Dec 2023 13:08:42 +0000
matrix-synapse-py3 (1.97.0) stable; urgency=medium
* New Synapse release 1.97.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Nov 2023 14:08:58 +0000
matrix-synapse-py3 (1.97.0~rc1) stable; urgency=medium
* New Synapse release 1.97.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Nov 2023 12:32:03 +0000
matrix-synapse-py3 (1.96.1) stable; urgency=medium
* New synapse release 1.96.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 17 Nov 2023 12:48:45 +0000
matrix-synapse-py3 (1.96.0) stable; urgency=medium
* New synapse release 1.96.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 16 Nov 2023 17:54:26 +0000
matrix-synapse-py3 (1.96.0~rc1) stable; urgency=medium
* New Synapse release 1.96.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:09:09 +0000
matrix-synapse-py3 (1.95.1) stable; urgency=medium
* New Synapse release 1.95.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:00:00 +0000
matrix-synapse-py3 (1.95.0) stable; urgency=medium
* New Synapse release 1.95.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Oct 2023 13:00:46 +0100
matrix-synapse-py3 (1.95.0~rc1) stable; urgency=medium
* New synapse release 1.95.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Oct 2023 15:50:17 +0000
matrix-synapse-py3 (1.94.0) stable; urgency=medium
* New Synapse release 1.94.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Oct 2023 10:57:41 +0100
matrix-synapse-py3 (1.94.0~rc1) stable; urgency=medium
* New Synapse release 1.94.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Oct 2023 11:48:18 +0100
matrix-synapse-py3 (1.93.0) stable; urgency=medium
* New Synapse release 1.93.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Sep 2023 15:54:40 +0100
matrix-synapse-py3 (1.93.0~rc1) stable; urgency=medium
* New synapse release 1.93.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Sep 2023 11:55:00 +0000
matrix-synapse-py3 (1.92.3) stable; urgency=medium
* New Synapse release 1.92.3.
-- Synapse Packaging team <packages@matrix.org> Mon, 18 Sep 2023 15:05:04 +0200
matrix-synapse-py3 (1.92.2) stable; urgency=medium
* New Synapse release 1.92.2.
-- Synapse Packaging team <packages@matrix.org> Fri, 15 Sep 2023 13:17:41 +0100
matrix-synapse-py3 (1.92.1) stable; urgency=medium
* New Synapse release 1.92.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 13:19:42 +0200
matrix-synapse-py3 (1.92.0) stable; urgency=medium
* New Synapse release 1.92.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 11:59:23 +0200
matrix-synapse-py3 (1.91.2) stable; urgency=medium
* New synapse release 1.91.2.
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Sep 2023 14:59:30 +0000
matrix-synapse-py3 (1.92.0~rc1) stable; urgency=medium
* New Synapse release 1.92.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Sep 2023 11:21:43 +0100
matrix-synapse-py3 (1.91.1) stable; urgency=medium
* New Synapse release 1.91.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 04 Sep 2023 14:03:18 +0100
matrix-synapse-py3 (1.91.0) stable; urgency=medium
* New Synapse release 1.91.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Aug 2023 11:18:10 +0100
matrix-synapse-py3 (1.91.0~rc1) stable; urgency=medium
* New Synapse release 1.91.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 23 Aug 2023 09:47:18 -0700
matrix-synapse-py3 (1.90.0) stable; urgency=medium
* New Synapse release 1.90.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Aug 2023 11:17:34 +0100
matrix-synapse-py3 (1.90.0~rc1) stable; urgency=medium
* New Synapse release 1.90.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Aug 2023 15:29:34 +0100
matrix-synapse-py3 (1.89.0) stable; urgency=medium
* New Synapse release 1.89.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Aug 2023 11:07:15 +0100
matrix-synapse-py3 (1.89.0~rc1) stable; urgency=medium
* New Synapse release 1.89.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Jul 2023 14:31:07 +0200
matrix-synapse-py3 (1.88.0) stable; urgency=medium
* New Synapse release 1.88.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Jul 2023 13:59:28 +0100
matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
* New Synapse release 1.88.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jul 2023 10:20:19 +0100
matrix-synapse-py3 (1.87.0) stable; urgency=medium
* New Synapse release 1.87.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Jul 2023 16:24:00 +0100
matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium
* New synapse release 1.87.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Jun 2023 15:27:04 +0000
matrix-synapse-py3 (1.86.0) stable; urgency=medium
* New Synapse release 1.86.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Jun 2023 17:22:46 +0200
matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
* New Synapse release 1.86.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 14 Jun 2023 12:16:27 +0200
matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
* New Synapse release 1.86.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Jun 2023 14:30:45 +0200
matrix-synapse-py3 (1.85.2) stable; urgency=medium
* New Synapse release 1.85.2.
-- Synapse Packaging team <packages@matrix.org> Thu, 08 Jun 2023 13:04:18 +0100
matrix-synapse-py3 (1.85.1) stable; urgency=medium
* New Synapse release 1.85.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jun 2023 10:51:12 +0100
matrix-synapse-py3 (1.85.0) stable; urgency=medium
* New Synapse release 1.85.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Jun 2023 09:39:29 +0100
matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
* New Synapse release 1.85.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 01 Jun 2023 09:16:18 -0700
matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
* New Synapse release 1.85.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 May 2023 13:56:54 +0100
matrix-synapse-py3 (1.84.1) stable; urgency=medium
* New Synapse release 1.84.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 26 May 2023 16:15:30 +0100
matrix-synapse-py3 (1.84.0) stable; urgency=medium
* New Synapse release 1.84.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 May 2023 10:57:22 +0100
matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
* New Synapse release 1.84.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 16 May 2023 11:12:02 +0100
matrix-synapse-py3 (1.83.0) stable; urgency=medium
* New Synapse release 1.83.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 09 May 2023 18:13:37 +0200
matrix-synapse-py3 (1.83.0~rc1) stable; urgency=medium
* New Synapse release 1.83.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 02 May 2023 15:56:38 +0100
matrix-synapse-py3 (1.82.0) stable; urgency=medium
* New Synapse release 1.82.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Apr 2023 11:56:06 +0100
matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium
* New Synapse release 1.82.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Apr 2023 09:47:30 +0100
matrix-synapse-py3 (1.81.0) stable; urgency=medium
* New Synapse release 1.81.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Apr 2023 14:18:35 +0100
matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium
* New Synapse release 1.81.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 06 Apr 2023 16:07:54 +0100
matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium
* New Synapse release 1.81.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Apr 2023 14:29:03 +0100
matrix-synapse-py3 (1.80.0) stable; urgency=medium
* New Synapse release 1.80.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Mar 2023 11:10:33 +0100
matrix-synapse-py3 (1.80.0~rc2) stable; urgency=medium
* New Synapse release 1.80.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 22 Mar 2023 08:30:16 -0700
matrix-synapse-py3 (1.80.0~rc1) stable; urgency=medium
* New Synapse release 1.80.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Mar 2023 10:56:08 -0700
matrix-synapse-py3 (1.79.0) stable; urgency=medium
* New Synapse release 1.79.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Mar 2023 16:14:50 +0100
matrix-synapse-py3 (1.79.0~rc2) stable; urgency=medium
* New Synapse release 1.79.0rc2.
-- Synapse Packaging team <packages@matrix.org> Mon, 13 Mar 2023 12:54:21 +0000
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium
* New Synapse release 1.79.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Mar 2023 12:03:49 +0000
matrix-synapse-py3 (1.78.0) stable; urgency=medium
* New Synapse release 1.78.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Feb 2023 08:56:03 -0800
matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium
* Add `matrix-org-archive-keyring` package as recommended.
* New Synapse release 1.78.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Feb 2023 14:29:19 +0000
matrix-synapse-py3 (1.77.0) stable; urgency=medium
* New Synapse release 1.77.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Feb 2023 12:59:02 +0100
matrix-synapse-py3 (1.77.0~rc2) stable; urgency=medium
* New Synapse release 1.77.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Feb 2023 12:44:21 +0000
matrix-synapse-py3 (1.77.0~rc1) stable; urgency=medium
* New Synapse release 1.77.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Feb 2023 13:45:14 +0000
matrix-synapse-py3 (1.76.0) stable; urgency=medium
* New Synapse release 1.76.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Jan 2023 08:21:47 -0800
matrix-synapse-py3 (1.76.0~rc2) stable; urgency=medium
* New Synapse release 1.76.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 27 Jan 2023 11:17:57 +0000
matrix-synapse-py3 (1.76.0~rc1) stable; urgency=medium
* Use Poetry 1.3.2 to manage the bundled virtualenv included with this package.
* New Synapse release 1.76.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 25 Jan 2023 16:21:16 +0000
matrix-synapse-py3 (1.75.0) stable; urgency=medium
* New Synapse release 1.75.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jan 2023 11:36:02 +0000
matrix-synapse-py3 (1.75.0~rc2) stable; urgency=medium
* New Synapse release 1.75.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 12 Jan 2023 10:30:15 -0800
matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium
* New Synapse release 1.75.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jan 2023 12:18:27 +0000
matrix-synapse-py3 (1.74.0) stable; urgency=medium
* New Synapse release 1.74.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Dec 2022 16:07:38 +0000
matrix-synapse-py3 (1.74.0~rc1) stable; urgency=medium
* New dependency on libicu-dev to provide improved results for user
search.
* New Synapse release 1.74.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Dec 2022 13:30:01 +0000
matrix-synapse-py3 (1.73.0) stable; urgency=medium
* New Synapse release 1.73.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Dec 2022 11:48:56 +0000
matrix-synapse-py3 (1.73.0~rc2) stable; urgency=medium
* New Synapse release 1.73.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 01 Dec 2022 10:02:19 +0000
matrix-synapse-py3 (1.73.0~rc1) stable; urgency=medium
* New Synapse release 1.73.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Nov 2022 12:28:13 +0000
matrix-synapse-py3 (1.72.0) stable; urgency=medium
* New Synapse release 1.72.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Nov 2022 10:57:30 +0000
matrix-synapse-py3 (1.72.0~rc1) stable; urgency=medium
* New Synapse release 1.72.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 16 Nov 2022 15:10:59 +0000
matrix-synapse-py3 (1.71.0) stable; urgency=medium
* New Synapse release 1.71.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Nov 2022 10:38:10 +0000
matrix-synapse-py3 (1.71.0~rc2) stable; urgency=medium
* New Synapse release 1.71.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 04 Nov 2022 12:00:33 +0000
matrix-synapse-py3 (1.71.0~rc1) stable; urgency=medium
* New Synapse release 1.71.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Nov 2022 12:10:17 +0000
matrix-synapse-py3 (1.70.1) stable; urgency=medium
* New Synapse release 1.70.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 28 Oct 2022 12:10:21 +0100
matrix-synapse-py3 (1.70.0) stable; urgency=medium
* New Synapse release 1.70.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Oct 2022 11:11:50 +0100
matrix-synapse-py3 (1.70.0~rc2) stable; urgency=medium
* New Synapse release 1.70.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Oct 2022 10:59:47 +0100
matrix-synapse-py3 (1.70.0~rc1) stable; urgency=medium
* New Synapse release 1.70.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 19 Oct 2022 14:11:57 +0100
matrix-synapse-py3 (1.69.0) stable; urgency=medium
* New Synapse release 1.69.0.
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Oct 2022 11:31:03 +0100
matrix-synapse-py3 (1.69.0~rc4) stable; urgency=medium
* New Synapse release 1.69.0rc4.
-- Synapse Packaging team <packages@matrix.org> Fri, 14 Oct 2022 15:04:47 +0100
matrix-synapse-py3 (1.69.0~rc3) stable; urgency=medium
* New Synapse release 1.69.0rc3.
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Oct 2022 13:24:04 +0100
matrix-synapse-py3 (1.69.0~rc2) stable; urgency=medium
* New Synapse release 1.69.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 06 Oct 2022 14:45:00 +0100
matrix-synapse-py3 (1.69.0~rc1) stable; urgency=medium matrix-synapse-py3 (1.69.0~rc1) stable; urgency=medium
* The man page for the hash_password script has been updated to reflect * The man page for the hash_password script has been updated to reflect
@ -1673,7 +1165,7 @@ matrix-synapse-py3 (0.99.3.1) stable; urgency=medium
matrix-synapse-py3 (0.99.3) stable; urgency=medium matrix-synapse-py3 (0.99.3) stable; urgency=medium
[ Richard van der Hoff ] [ Richard van der Hoff ]
* Fix warning during preconfiguration. (Fixes: https://github.com/matrix-org/synapse/issues/4819) * Fix warning during preconfiguration. (Fixes: #4819)
[ Synapse Packaging team ] [ Synapse Packaging team ]
* New synapse release 0.99.3. * New synapse release 0.99.3.

debian/control vendored

@ -8,8 +8,6 @@ Build-Depends:
dh-virtualenv (>= 1.1), dh-virtualenv (>= 1.1),
libsystemd-dev, libsystemd-dev,
libpq-dev, libpq-dev,
libicu-dev,
pkg-config,
lsb-release, lsb-release,
python3-dev, python3-dev,
python3, python3,
@ -37,7 +35,6 @@ Depends:
# so we put perl:Depends in Suggests rather than Depends. # so we put perl:Depends in Suggests rather than Depends.
Recommends: Recommends:
${shlibs1:Recommends}, ${shlibs1:Recommends},
matrix-org-archive-keyring,
Suggests: Suggests:
sqlite3, sqlite3,
${perl:Depends}, ${perl:Depends},


@ -46,7 +46,7 @@ for port in 8080 8081 8082; do
echo '' echo ''
# Warning, this heredoc depends on the interaction of tabs and spaces. # Warning, this heredoc depends on the interaction of tabs and spaces.
# Please don't accidentally bork me with your fancy settings. # Please don't accidentaly bork me with your fancy settings.
listeners=$(cat <<-PORTLISTENERS listeners=$(cat <<-PORTLISTENERS
# Configure server to listen on both $https_port and $port # Configure server to listen on both $https_port and $port
# This overrides some of the default settings above # This overrides some of the default settings above
@ -80,8 +80,12 @@ for port in 8080 8081 8082; do
echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\"" echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\"" echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
# Request keys directly from servers contacted over federation # Ignore keys from the trusted keys server
echo 'trusted_key_servers: []' echo '# Ignore keys from the trusted keys server'
echo 'trusted_key_servers:'
echo ' - server_name: "matrix.org"'
echo ' accept_keys_insecurely: true'
echo ''
# Allow the servers to communicate over localhost. # Allow the servers to communicate over localhost.
allow_list=$(cat <<-ALLOW_LIST allow_list=$(cat <<-ALLOW_LIST


@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
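Any target not defined here falls through to the catch-all, so the usual Sphinx workflow applies (assuming `sphinx-build` is on the PATH):

```sh
# Build the HTML developer docs into _build/html
make html
# ...which the catch-all target expands to:
sphinx-build -M html . _build
```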


@ -1,50 +0,0 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "Synapse development"
copyright = "2023, The Matrix.org Foundation C.I.C."
author = "The Synapse Maintainers and Community"
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
"autodoc2",
"myst_parser",
]
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for Autodoc2 ----------------------------------------------------
autodoc2_docstring_parser_regexes = [
# this will render all docstrings as 'MyST' Markdown
(r".*", "myst"),
]
autodoc2_packages = [
{
"path": "../synapse",
# Don't render documentation for everything as a matter of course
"auto_mode": False,
},
]
# -- Options for MyST (Markdown) ---------------------------------------------
# myst_heading_anchors = 2
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = "furo"
html_static_path = ["_static"]


@ -1,22 +0,0 @@
.. Synapse Developer Documentation documentation master file, created by
sphinx-quickstart on Mon Mar 13 08:59:51 2023.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to the Synapse Developer Documentation!
===========================================================
.. toctree::
:maxdepth: 2
:caption: Contents:
modules/federation_sender
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`


@ -1,5 +0,0 @@
Federation Sender
=================
```{autodoc2-docstring} synapse.federation.sender
```


@ -17,48 +17,39 @@
# Irritatingly, there is no blessed guide on how to distribute an application with its # Irritatingly, there is no blessed guide on how to distribute an application with its
# poetry-managed environment in a docker image. We have opted for # poetry-managed environment in a docker image. We have opted for
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in # `poetry export | pip install -r /dev/stdin`, but there are known bugs in
# in `poetry export` in the past. # in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
# In case we get bitten by those bugs in the future, the recommendations here might
# be useful:
# https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
# https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
ARG PYTHON_VERSION=3.11
ARG PYTHON_VERSION=3.9
### ###
### Stage 0: generate requirements.txt ### Stage 0: generate requirements.txt
### ###
# We hardcode the use of Debian bookworm here because this could change upstream # We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm. # and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
# RUN --mount is specific to buildkit and is documented at # RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount. # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
# Here we use it to set up a cache for apt (and below for pip), to improve # Here we use it to set up a cache for apt (and below for pip), to improve
# rebuild speeds on slow connections. # rebuild speeds on slow connections.
RUN \ RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \ apt-get update -qq && apt-get install -yqq \
build-essential curl git libffi-dev libssl-dev pkg-config \ build-essential cargo git libffi-dev libssl-dev \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH.
# (Rust may be needed to compile `cryptography`---which is one of poetry's
# dependencies---on platforms that don't have a `cryptography` wheel.
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
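For instance, a memory-constrained arm64 build could opt in like so (the image tag and context path are illustrative):

```sh
docker build \
  --build-arg CARGO_NET_GIT_FETCH_WITH_CLI=true \
  -f docker/Dockerfile \
  -t matrixdotorg/synapse:latest .
```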
# We install poetry in its own build stage to avoid its dependencies conflicting with # We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies. # synapse's dependencies.
RUN --mount=type=cache,target=/root/.cache/pip \ RUN --mount=type=cache,target=/root/.cache/pip \
pip install --user "poetry==1.3.2" pip install --user "poetry==1.2.0"
WORKDIR /synapse WORKDIR /synapse
@ -79,36 +70,34 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Otherwise, just create an empty requirements file so that the Dockerfile can # Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed. # proceed.
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \ /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
else \ else \
touch /synapse/requirements.txt; \ touch /synapse/requirements.txt; \
fi fi
### ###
### Stage 1: builder ### Stage 1: builder
### ###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
# install the OS build deps # install the OS build deps
RUN \ RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \ apt-get update -qq && apt-get install -yqq \
build-essential \ build-essential \
libffi-dev \ libffi-dev \
libjpeg-dev \ libjpeg-dev \
libpq-dev \ libpq-dev \
libssl-dev \ libssl-dev \
libwebp-dev \ libwebp-dev \
libxml++2.6-dev \ libxml++2.6-dev \
libxslt1-dev \ libxslt1-dev \
openssl \ openssl \
zlib1g-dev \ zlib1g-dev \
git \ git \
curl \ curl \
libicu-dev \ && rm -rf /var/lib/apt/lists/*
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH # Install rust and ensure its in the PATH
@ -117,13 +106,7 @@ ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
# To speed up rebuilds, install all of the dependencies before we copy over # To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be # the whole synapse project, so that this layer in the Docker cache can be
@ -138,7 +121,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
COPY synapse /synapse/synapse/ COPY synapse /synapse/synapse/
COPY rust /synapse/rust/ COPY rust /synapse/rust/
# ... and what we need to `pip install`. # ... and what we need to `pip install`.
COPY pyproject.toml README.rst build_rust.py Cargo.toml Cargo.lock /synapse/ COPY pyproject.toml README.rst build_rust.py /synapse/
# Repeat of earlier build argument declaration, as this is a new build stage. # Repeat of earlier build argument declaration, as this is a new build stage.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
@ -146,19 +129,17 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself. # Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies # If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step. # as we should already have those from the previous `pip install` step.
RUN --mount=type=cache,target=/synapse/target,sharing=locked \ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \ pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
else \ else \
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \ pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
fi fi
### ###
### Stage 2: runtime ### Stage 2: runtime
### ###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse' LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md' LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
@ -166,20 +147,19 @@ LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git
LABEL org.opencontainers.image.licenses='Apache-2.0' LABEL org.opencontainers.image.licenses='Apache-2.0'
RUN \ RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \ apt-get update -qq && apt-get install -yqq \
curl \ curl \
gosu \ gosu \
libjpeg62-turbo \ libjpeg62-turbo \
libpq5 \ libpq5 \
libwebp7 \ libwebp6 \
xmlsec1 \ xmlsec1 \
libjemalloc2 \ libjemalloc2 \
libicu72 \ libssl-dev \
libssl-dev \ openssl \
openssl \ && rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /install /usr/local COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py COPY ./docker/start.py /start.py
@ -190,4 +170,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp
ENTRYPOINT ["/start.py"] ENTRYPOINT ["/start.py"]
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \ HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD curl -fSs http://localhost:8008/health || exit 1 CMD curl -fSs http://localhost:8008/health || exit 1
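The same endpoint can be probed by hand when debugging a container that Docker marks unhealthy; a sketch assuming port 8008 is published and the container is named `synapse`:

```sh
# Ask Synapse directly; -f makes curl exit non-zero on an HTTP error
curl -fSs http://localhost:8008/health
# Ask Docker what the HEALTHCHECK has been observing
docker inspect --format '{{.State.Health.Status}}' synapse
```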


@ -24,22 +24,20 @@ ARG distro=""
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
# it's not obviously easier to use that than to build our own.) # it's not obviously easier to use that than to build our own.)
FROM docker.io/library/${distro} as builder FROM ${distro} as builder
RUN apt-get update -qq -o Acquire::Languages=none RUN apt-get update -qq -o Acquire::Languages=none
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
-yqq --no-install-recommends \ -yqq --no-install-recommends \
build-essential \ build-essential \
ca-certificates \ ca-certificates \
devscripts \ devscripts \
equivs \ equivs \
wget wget
# fetch and unpack the package # fetch and unpack the package
# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
# Debian sid. TODO: Switch back to upstream once https://github.com/spotify/dh-virtualenv/pull/354 has merged.
RUN mkdir /dh-virtualenv RUN mkdir /dh-virtualenv
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/matrix-org/dh-virtualenv/archive/refs/tags/matrixorg-2023010302.tar.gz RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
# install its build deps. We do another apt-cache-update here, because we might # install its build deps. We do another apt-cache-update here, because we might
@ -55,36 +53,38 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
### ###
### Stage 1 ### Stage 1
### ###
FROM docker.io/library/${distro} FROM ${distro}
# Get the distro we want to pull from as a dynamic build variable # Get the distro we want to pull from as a dynamic build variable
# (We need to define it in each build stage) # (We need to define it in each build stage)
ARG distro="" ARG distro=""
ENV distro ${distro} ENV distro ${distro}
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
# http://bugs.python.org/issue19846
ENV LANG C.UTF-8
# Install the build dependencies # Install the build dependencies
# #
# NB: keep this list in sync with the list of build-deps in debian/control # NB: keep this list in sync with the list of build-deps in debian/control
# TODO: it would be nice to do that automatically. # TODO: it would be nice to do that automatically.
RUN apt-get update -qq -o Acquire::Languages=none \ RUN apt-get update -qq -o Acquire::Languages=none \
&& env DEBIAN_FRONTEND=noninteractive apt-get install \ && env DEBIAN_FRONTEND=noninteractive apt-get install \
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \ -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
build-essential \ build-essential \
curl \ curl \
debhelper \ debhelper \
devscripts \ devscripts \
libsystemd-dev \ libsystemd-dev \
lsb-release \ lsb-release \
pkg-config \ pkg-config \
python3-dev \ python3-dev \
python3-pip \ python3-pip \
python3-setuptools \ python3-setuptools \
python3-venv \ python3-venv \
sqlite3 \ sqlite3 \
libpq-dev \ libpq-dev \
libicu-dev \ xmlsec1
pkg-config \
xmlsec1
# Install rust and ensure it's in the PATH # Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust ENV RUSTUP_HOME=/rust
@ -92,7 +92,7 @@ ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb / COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /


@ -1,13 +1,12 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
ARG SYNAPSE_VERSION=latest ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
# first of all, we create a base image with an nginx which we can copy into the # first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing # target image. For repeated rebuilds, this is much faster than apt installing
# each time. # each time.
FROM docker.io/library/debian:bookworm-slim AS deps_base FROM debian:bullseye-slim AS deps_base
RUN \ RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \
@ -21,10 +20,10 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base
# which makes it much easier to copy (but we need to make sure we use an image # which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get # based on the same debian version as the synapse image, to make sure we get
# the expected version of libc. # the expected version of libc.
FROM docker.io/library/redis:7-bookworm AS redis_base FROM redis:6-bullseye AS redis_base
# now build the final image, based on the regular Synapse docker image # now build the final image, based on the regular Synapse docker image
FROM $FROM FROM matrixdotorg/synapse:$SYNAPSE_VERSION
# Install supervisord with pip instead of apt, to avoid installing a second # Install supervisord with pip instead of apt, to avoid installing a second
# copy of python. # copy of python.
@ -41,11 +40,7 @@ FROM $FROM
COPY --from=deps_base /etc/nginx /etc/nginx COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/lib/nginx RUN chown www-data /var/log/nginx /var/lib/nginx
# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
# Copy Synapse worker, nginx and supervisord configuration template files # Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/ COPY ./docker/conf-workers/* /conf/


@ -73,8 +73,7 @@ The following environment variables are supported in `generate` mode:
will log sensitive information such as access tokens. will log sensitive information such as access tokens.
This should not be needed unless you are a developer attempting to debug something This should not be needed unless you are a developer attempting to debug something
particularly tricky. particularly tricky.
* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
for testing.
## Postgres ## Postgres
@ -242,4 +241,4 @@ healthcheck:
Jemalloc is embedded in the image and will be used instead of the default allocator. Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read about jemalloc by reading the Synapse You can read about jemalloc by reading the Synapse
[Admin FAQ](https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu). [README](https://github.com/matrix-org/synapse/blob/HEAD/README.rst#help-synapse-is-slow-and-eats-all-my-ram-cpu).


@ -7,10 +7,8 @@
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
ARG SYNAPSE_VERSION=latest ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
FROM $FROM FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# First of all, we copy postgres server from the official postgres image, # First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing # since for repeated rebuilds, this is much faster than apt installing
# postgres each time. # postgres each time.
@ -20,8 +18,8 @@ FROM $FROM
# the same debian version as Synapse's docker image (so the versions of the # the same debian version as Synapse's docker image (so the versions of the
# shared libraries match). # shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin" ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data ENV PGDATA=/var/lib/postgresql/data


@ -6,7 +6,7 @@ set -e
echo "Complement Synapse launcher" echo "Complement Synapse launcher"
echo " Args: $@" echo " Args: $@"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR" echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS"
function log { function log {
d=$(date +"%Y-%m-%d %H:%M:%S,%3N") d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
@ -45,13 +45,9 @@ esac
if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
# Specify the workers to test with # Specify the workers to test with
# Allow overriding by explicitly setting SYNAPSE_WORKER_TYPES outside, while still export SYNAPSE_WORKER_TYPES="\
# utilizing WORKERS=1 for backwards compatibility. event_persister, \
# -n True if the length of string is non-zero. event_persister, \
# -z True if the length of string is zero.
if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
export SYNAPSE_WORKER_TYPES="\
event_persister:2, \
background_worker, \ background_worker, \
frontend_proxy, \ frontend_proxy, \
event_creator, \ event_creator, \
@ -61,18 +57,9 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
federation_reader, \ federation_reader, \
federation_sender, \ federation_sender, \
synchrotron, \ synchrotron, \
client_reader, \
appservice, \ appservice, \
pusher, \ pusher"
stream_writers=account_data+presence+receipts+to_device+typing"
fi
log "Workers requested: $SYNAPSE_WORKER_TYPES"
# adjust connection pool limits on worker mode as otherwise running lots of worker synapses
# can make docker unhappy (in GHA)
export POSTGRES_CP_MIN=1
export POSTGRES_CP_MAX=3
echo "using reduced connection pool limits for worker mode"
# Improve startup times by using a launcher based on fork() # Improve startup times by using a launcher based on fork()
export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1 export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1
else else
@ -81,17 +68,6 @@ else
fi fi
if [[ -n "$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR" ]]; then
if [[ -n "$SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER" ]]; then
export SYNAPSE_COMPLEMENT_FORKING_LAUNCHER_ASYNC_IO_REACTOR="1"
else
export SYNAPSE_ASYNC_IO_REACTOR="1"
fi
else
export SYNAPSE_ASYNC_IO_REACTOR="0"
fi
# Add Complement's appservice registration directory, if there is one # Add Complement's appservice registration directory, if there is one
# (It can be absent when there are no application services in this test!) # (It can be absent when there are no application services in this test!)
if [ -d /complement/appservice ]; then if [ -d /complement/appservice ]; then
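The develop side of this launcher treats the hard-coded worker list as a default that can be overridden from outside the container; a sketch of such an override (the image name and remaining wiring are illustrative):

```sh
docker run \
  -e SYNAPSE_COMPLEMENT_USE_WORKERS=1 \
  -e SYNAPSE_WORKER_TYPES="event_persister:2, synchrotron, federation_sender" \
  complement-synapse
```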


@ -12,8 +12,6 @@ trusted_key_servers: []
enable_registration: true enable_registration: true
enable_registration_without_verification: true enable_registration_without_verification: true
bcrypt_rounds: 4 bcrypt_rounds: 4
url_preview_enabled: true
url_preview_ip_range_blacklist: []
## Registration ## ## Registration ##
@ -92,16 +90,18 @@ allow_device_name_lookup_over_federation: true
## Experimental Features ## ## Experimental Features ##
experimental_features: experimental_features:
# Enable spaces support
spaces_enabled: true
# Enable history backfilling support
msc2716_enabled: true
# server-side support for partial state in /send_join responses
msc3706_enabled: true
{% if not workers_in_use %}
# client-side support for partial state in /send_join responses # client-side support for partial state in /send_join responses
faster_joins: true faster_joins: true
# Enable support for polls {% endif %}
msc3381_polls_enabled: true # Enable jump to date endpoint
# Enable deleting device-specific notification settings stored in account data msc3030_enabled: true
msc3890_enabled: true
# Enable removing account data support
msc3391_enabled: true
# Filtering /messages by relation type.
msc3874_enabled: true
server_notices: server_notices:
system_mxid_localpart: _server system_mxid_localpart: _server


@ -35,11 +35,7 @@ server {
# Send all other traffic to the main process # Send all other traffic to the main process
location ~* ^(\\/_matrix|\\/_synapse) { location ~* ^(\\/_matrix|\\/_synapse) {
{% if using_unix_sockets %}
proxy_pass http://unix:/run/main_public.sock;
{% else %}
proxy_pass http://localhost:8080; proxy_pass http://localhost:8080;
{% endif %}
proxy_set_header X-Forwarded-For $remote_addr; proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host; proxy_set_header Host $host;


@ -6,9 +6,6 @@
{% if enable_redis %} {% if enable_redis %}
redis: redis:
enabled: true enabled: true
{% if using_unix_sockets %}
path: /tmp/redis.sock
{% endif %}
{% endif %} {% endif %}
{% if appservice_registrations is not none %} {% if appservice_registrations is not none %}


@ -19,11 +19,7 @@ username=www-data
autorestart=true autorestart=true
[program:redis] [program:redis]
{% if using_unix_sockets %}
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server --unixsocket /tmp/redis.sock
{% else %}
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
{% endif %}
priority=1 priority=1
stdout_logfile=/dev/stdout stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0 stdout_logfile_maxbytes=0


@ -6,13 +6,13 @@
worker_app: "{{ app }}" worker_app: "{{ app }}"
worker_name: "{{ name }}" worker_name: "{{ name }}"
# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093
worker_listeners: worker_listeners:
- type: http - type: http
{% if using_unix_sockets %}
path: "/run/worker.{{ port }}"
{% else %}
port: {{ port }} port: {{ port }}
{% endif %}
{% if listener_resources %} {% if listener_resources %}
resources: resources:
- names: - names:


@ -36,17 +36,12 @@ listeners:
# Allow configuring in case we want to reverse proxy 8008 # Allow configuring in case we want to reverse proxy 8008
# using another process in the same container # using another process in the same container
{% if SYNAPSE_USE_UNIX_SOCKET %}
# Unix sockets don't care about TLS or IP addresses or ports
- path: '/run/main_public.sock'
type: http
{% else %}
- port: {{ SYNAPSE_HTTP_PORT or 8008 }} - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
tls: false tls: false
bind_addresses: ['::'] bind_addresses: ['::']
type: http type: http
x_forwarded: false x_forwarded: false
{% endif %}
resources: resources:
- names: [client] - names: [client]
compress: true compress: true
@ -62,13 +57,10 @@ database:
user: "{{ POSTGRES_USER or "synapse" }}" user: "{{ POSTGRES_USER or "synapse" }}"
password: "{{ POSTGRES_PASSWORD }}" password: "{{ POSTGRES_PASSWORD }}"
database: "{{ POSTGRES_DB or "synapse" }}" database: "{{ POSTGRES_DB or "synapse" }}"
{% if not SYNAPSE_USE_UNIX_SOCKET %}
{# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
host: "{{ POSTGRES_HOST or "db" }}" host: "{{ POSTGRES_HOST or "db" }}"
port: "{{ POSTGRES_PORT or "5432" }}" port: "{{ POSTGRES_PORT or "5432" }}"
{% endif %} cp_min: 5
cp_min: {{ POSTGRES_CP_MIN or 5 }} cp_max: 10
cp_max: {{ POSTGRES_CP_MAX or 10 }}
{% else %} {% else %}
database: database:
name: "sqlite3" name: "sqlite3"


@ -49,35 +49,17 @@ handlers:
class: logging.StreamHandler class: logging.StreamHandler
formatter: precise formatter: precise
{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
loggers: loggers:
# This is just here so we can leave `loggers` in the config regardless of whether
# we configure other loggers below (avoid empty yaml dict error).
_placeholder:
level: "INFO"
{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
synapse.storage.SQL: synapse.storage.SQL:
# beware: increasing this to DEBUG will make synapse log sensitive # beware: increasing this to DEBUG will make synapse log sensitive
# information such as access tokens. # information such as access tokens.
level: INFO level: INFO
{% endif %} {% endif %}
{% if SYNAPSE_LOG_TESTING %}
{#
If Synapse is under test, log a few more useful things for a developer
attempting to debug something particularly tricky.
With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
unexpectedly) filtered out of responses in tests. It's just nice to be able to
look at the CI log and figure out why an event isn't being returned.
#}
synapse.visibility.filtered_event_debug:
level: DEBUG
{% endif %}
root: root:
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }} level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}

File diff suppressed because it is too large.


@ -1,75 +0,0 @@
# syntax=docker/dockerfile:1
# This dockerfile builds an editable install of Synapse.
#
# Used by `complement.sh`. Not suitable for production use.
ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential \
libffi-dev \
libjpeg-dev \
libpq-dev \
libssl-dev \
libwebp-dev \
libxml++2.6-dev \
libxslt1-dev \
openssl \
zlib1g-dev \
git \
curl \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
xmlsec1 \
libjemalloc2 \
&& rm -rf /var/lib/apt/lists/*
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# Make a base copy of the editable source tree, so that we have something to
# install and build now — even though it's going to be covered up by a mount
# at runtime.
COPY synapse /editable-src/synapse/
COPY rust /editable-src/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml poetry.lock README.rst build_rust.py Cargo.toml Cargo.lock /editable-src/
RUN pip install poetry
RUN poetry config virtualenvs.create false
RUN cd /editable-src && poetry install --extras all
# Make copies of useful things for inspection:
# - the Rust module (must be copied to the editable source tree before startup)
# - poetry.lock is useful for checking if dependencies have changed.
RUN cp /editable-src/synapse/synapse_rust.abi3.so /synapse_rust.abi3.so.bak
RUN cp /editable-src/poetry.lock /poetry.lock.bak
### Extra setup from original Dockerfile
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf
EXPOSE 8008/tcp 8009/tcp 8448/tcp
ENTRYPOINT ["/start.py"]
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD curl -fSs http://localhost:8008/health || exit 1


@ -13,19 +13,14 @@ import jinja2
# Utility functions # Utility functions
def log(txt: str) -> None: def log(txt: str) -> None:
print(txt) print(txt, file=sys.stderr)
def error(txt: str) -> NoReturn: def error(txt: str) -> NoReturn:
print(txt, file=sys.stderr) log(txt)
sys.exit(2) sys.exit(2)
def flush_buffers() -> None:
sys.stdout.flush()
sys.stderr.flush()
def convert(src: str, dst: str, environ: Mapping[str, object]) -> None: def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
"""Generate a file from a template """Generate a file from a template
@ -82,7 +77,7 @@ def generate_config_from_template(
with open(filename) as handle: with open(filename) as handle:
value = handle.read() value = handle.read()
else: else:
log(f"Generating a random secret for {secret}") log("Generating a random secret for {}".format(secret))
value = codecs.encode(os.urandom(32), "hex").decode() value = codecs.encode(os.urandom(32), "hex").decode()
with open(filename, "w") as handle: with open(filename, "w") as handle:
handle.write(value) handle.write(value)
@ -136,10 +131,10 @@ def generate_config_from_template(
if ownership is not None: if ownership is not None:
log(f"Setting ownership on /data to {ownership}") log(f"Setting ownership on /data to {ownership}")
subprocess.run(["chown", "-R", ownership, "/data"], check=True) subprocess.check_output(["chown", "-R", ownership, "/data"])
args = ["gosu", ownership] + args args = ["gosu", ownership] + args
subprocess.run(args, check=True) subprocess.check_output(args)
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None: def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
@ -163,7 +158,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
if ownership is not None: if ownership is not None:
# make sure that synapse has perms to write to the data dir. # make sure that synapse has perms to write to the data dir.
log(f"Setting ownership on {data_dir} to {ownership}") log(f"Setting ownership on {data_dir} to {ownership}")
subprocess.run(["chown", ownership, data_dir], check=True) subprocess.check_output(["chown", ownership, data_dir])
# create a suitable log config from our template # create a suitable log config from our template
log_config_file = "%s/%s.log.config" % (config_dir, server_name) log_config_file = "%s/%s.log.config" % (config_dir, server_name)
@ -190,7 +185,6 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
"--open-private-ports", "--open-private-ports",
] ]
# log("running %s" % (args, )) # log("running %s" % (args, ))
flush_buffers()
os.execv(sys.executable, args) os.execv(sys.executable, args)
@ -239,7 +233,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
log("Could not find %s, will not use" % (jemallocpath,)) log("Could not find %s, will not use" % (jemallocpath,))
# if there are no config files passed to synapse, try adding the default file # if there are no config files passed to synapse, try adding the default file
if not any(p.startswith(("--config-path", "-c")) for p in args): if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
config_path = environ.get( config_path = environ.get(
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml" "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
@ -273,10 +267,8 @@ running with 'migrate_config'. See the README for more details.
args = [sys.executable] + args args = [sys.executable] + args
if ownership is not None: if ownership is not None:
args = ["gosu", ownership] + args args = ["gosu", ownership] + args
flush_buffers()
os.execve("/usr/sbin/gosu", args, environ) os.execve("/usr/sbin/gosu", args, environ)
else: else:
flush_buffers()
os.execve(sys.executable, args, environ) os.execve(sys.executable, args, environ)


@ -9,8 +9,6 @@
- [Configuring a Reverse Proxy](reverse_proxy.md) - [Configuring a Reverse Proxy](reverse_proxy.md)
- [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md) - [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md)
- [Configuring a Turn Server](turn-howto.md) - [Configuring a Turn Server](turn-howto.md)
- [coturn TURN server](setup/turn/coturn.md)
- [eturnal TURN server](setup/turn/eturnal.md)
- [Delegation](delegate.md) - [Delegation](delegate.md)
# Upgrading # Upgrading
@ -19,7 +17,7 @@
# Usage # Usage
- [Federation](federate.md) - [Federation](federate.md)
- [Configuration](usage/configuration/README.md) - [Configuration](usage/configuration/README.md)
- [Configuration Manual](usage/configuration/config_documentation.md) - [Configuration Manual](usage/configuration/config_documentation.md)
- [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md) - [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
- [Logging Sample Config File](usage/configuration/logging_sample_config.md) - [Logging Sample Config File](usage/configuration/logging_sample_config.md)
- [Structured Logging](structured_logging.md) - [Structured Logging](structured_logging.md)
@ -48,7 +46,6 @@
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md) - [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
- [Background update controller callbacks](modules/background_update_controller_callbacks.md) - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
- [Account data callbacks](modules/account_data_callbacks.md) - [Account data callbacks](modules/account_data_callbacks.md)
- [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md) - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
- [Workers](workers.md) - [Workers](workers.md)
- [Using `synctl` with Workers](synctl_workers.md) - [Using `synctl` with Workers](synctl_workers.md)
@ -58,7 +55,6 @@
- [Account Validity](admin_api/account_validity.md) - [Account Validity](admin_api/account_validity.md)
- [Background Updates](usage/administration/admin_api/background_updates.md) - [Background Updates](usage/administration/admin_api/background_updates.md)
- [Event Reports](admin_api/event_reports.md) - [Event Reports](admin_api/event_reports.md)
- [Experimental Features](admin_api/experimental_features.md)
- [Media](admin_api/media_admin_api.md) - [Media](admin_api/media_admin_api.md)
- [Purge History](admin_api/purge_history_api.md) - [Purge History](admin_api/purge_history_api.md)
- [Register Users](admin_api/register_api.md) - [Register Users](admin_api/register_api.md)
@ -98,9 +94,7 @@
- [Cancellation](development/synapse_architecture/cancellation.md) - [Cancellation](development/synapse_architecture/cancellation.md)
- [Log Contexts](log_contexts.md) - [Log Contexts](log_contexts.md)
- [Replication](replication.md) - [Replication](replication.md)
- [Streams](development/synapse_architecture/streams.md)
- [TCP Replication](tcp_replication.md) - [TCP Replication](tcp_replication.md)
- [Faster remote joins](development/synapse_architecture/faster_joins.md)
- [Internal Documentation](development/internal_documentation/README.md) - [Internal Documentation](development/internal_documentation/README.md)
- [Single Sign-On]() - [Single Sign-On]()
- [SAML](development/saml.md) - [SAML](development/saml.md)
View File
@ -1,13 +1,11 @@
# Account validity API # Account validity API
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
This API allows a server administrator to manage the validity of an account. To This API allows a server administrator to manage the validity of an account. To
use it, you must enable the account validity feature (under use it, you must enable the account validity feature (under
`account_validity`) in Synapse's configuration. `account_validity`) in Synapse's configuration.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
## Renew account ## Renew account
View File
@ -3,7 +3,7 @@
This API returns information about reported events. This API returns information about reported events.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
The api is: The api is:
``` ```
@ -169,17 +169,3 @@ The following fields are returned in the JSON response body:
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not * `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
have a canonical alias set. have a canonical alias set.
* `event_json`: object - Details of the original event that was reported. * `event_json`: object - Details of the original event that was reported.
# Delete a specific event report
This API deletes a specific event report. If the request is successful, the response body
will be an empty JSON object.
The api is:
```
DELETE /_synapse/admin/v1/event_reports/<report_id>
```
**URL parameters:**
* `report_id`: string - The ID of the event report.
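As an illustration, a minimal Python sketch of calling this endpoint; the homeserver URL and admin token are placeholders, not values from this diff:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder admin access token


def delete_event_report(report_id: str) -> None:
    resp = requests.delete(
        f"{HOMESERVER}/_synapse/admin/v1/event_reports/{report_id}",
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    )
    resp.raise_for_status()
    # On success the response body is an empty JSON object: {}
    print(resp.json())
```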
View File
@ -1,55 +0,0 @@
# Experimental Features API
This API allows a server administrator to enable or disable some experimental features on a per-user
basis. The currently supported features are:
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
UIA when first uploading cross-signing keys.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
## Enabling/Disabling Features
This API allows a server administrator to enable experimental features for a given user. The request must
provide a body containing the user id and listing the features to enable/disable in the following format:
```json
{
"features": {
"msc3026":true,
"msc3881":true
}
}
```
where `true` enables the feature and `false` disables it.
The API is:
```
PUT /_synapse/admin/v1/experimental_features/<user_id>
```
## Listing Enabled Features
To list which features are enabled/disabled for a given user send a request to the following API:
```
GET /_synapse/admin/v1/experimental_features/<user_id>
```
It will return a list of possible features and indicate whether they are enabled or disabled for the
user like so:
```json
{
"features": {
"msc3026": true,
"msc3881": false,
"msc3967": false
}
}
```
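Putting the two endpoints together, a minimal sketch; the homeserver URL, token, and user ID are placeholders:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder
HEADERS = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

user_id = "@alice:example.com"  # placeholder

# Enable msc3026 and disable msc3881 for this user.
requests.put(
    f"{HOMESERVER}/_synapse/admin/v1/experimental_features/{user_id}",
    headers=HEADERS,
    json={"features": {"msc3026": True, "msc3881": False}},
).raise_for_status()

# Read back the per-user feature flags.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/experimental_features/{user_id}",
    headers=HEADERS,
)
resp.raise_for_status()
print(resp.json()["features"])
```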
View File
@ -6,7 +6,7 @@ Details about the format of the `media_id` and storage of the media in the file
are documented under [media repository](../media_repository.md). are documented under [media repository](../media_repository.md).
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
## List all media in a room ## List all media in a room
@ -235,14 +235,6 @@ The following fields are returned in the JSON response body:
Request: Request:
```
POST /_synapse/admin/v1/media/delete?before_ts=<before_ts>
{}
```
*Deprecated in Synapse v1.78.0:* This API is available at the deprecated endpoint:
``` ```
POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts> POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>
@ -251,7 +243,7 @@ POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>
URL Parameters URL Parameters
* `server_name`: string - The name of your local server (e.g `matrix.org`). *Deprecated in Synapse v1.78.0.* * `server_name`: string - The name of your local server (e.g `matrix.org`).
* `before_ts`: string representing a positive integer - Unix timestamp in milliseconds. * `before_ts`: string representing a positive integer - Unix timestamp in milliseconds.
Files that were last used before this timestamp will be deleted. It is the timestamp of Files that were last used before this timestamp will be deleted. It is the timestamp of
last access, not the timestamp when the file was created. last access, not the timestamp when the file was created.
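Since `before_ts` is a Unix timestamp in milliseconds, a caller typically computes it from the current time. A minimal sketch against the non-deprecated endpoint shown above (server and token are placeholders):

```python
import time

import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

# Delete local media last accessed more than 30 days ago.
before_ts = int((time.time() - 30 * 24 * 3600) * 1000)  # milliseconds

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/media/delete",
    params={"before_ts": before_ts},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},
)
resp.raise_for_status()
print(resp.json())
```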
View File
@ -11,7 +11,7 @@ Note that Synapse requires at least one message in each room, so it will never
delete the last message in a room. delete the last message in a room.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
The API is: The API is:
View File
@ -1,7 +1,5 @@
# Shared-Secret Registration # Shared-Secret Registration
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
This API allows for the creation of users in an administrative and This API allows for the creation of users in an administrative and
non-interactive way. This is generally used for bootstrapping a Synapse non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts. instance with administrator accounts.
View File
@ -6,7 +6,7 @@ local users. The server administrator must be in the room and have permission to
invite users. invite users.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
## Parameters ## Parameters

View File
sorting the returned list. This API supports pagination. sorting the returned list. This API supports pagination.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
**Parameters** **Parameters**
@ -400,7 +400,7 @@ sent to a room in a given timeframe. There are various parameters available
that allow for filtering and ordering the returned list. This API supports pagination. that allow for filtering and ordering the returned list. This API supports pagination.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages). This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).
@ -419,7 +419,7 @@ The following query parameters are available:
* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch * `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint. or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
* `to` - The token to stop returning events at. * `to` - The token to spot returning events at.
* `limit` - The maximum number of events to return. Defaults to `10`. * `limit` - The maximum number of events to return. Defaults to `10`.
* `filter` - A JSON RoomEventFilter to filter returned events with. * `filter` - A JSON RoomEventFilter to filter returned events with.
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting * `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
@ -536,8 +536,7 @@ The following query parameters are available:
**Response** **Response**
* `event_id` - The event ID closest to the given timestamp. * `event_id` - converted from timestamp
* `origin_server_ts` - The timestamp of the event in milliseconds since the Unix epoch.
# Block Room API # Block Room API
The Block Room admin API allows server admins to block and unblock rooms, The Block Room admin API allows server admins to block and unblock rooms,
View File
@ -4,7 +4,7 @@ Returns information about all local media usage of users. Gives the
possibility to filter them by time and user. possibility to filter them by time and user.
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
The API is: The API is:
@ -81,52 +81,3 @@ The following fields are returned in the JSON response body:
- `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`). - `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`).
* `next_token` - integer - Opaque value used for pagination. See above. * `next_token` - integer - Opaque value used for pagination. See above.
* `total` - integer - Total number of users after filtering. * `total` - integer - Total number of users after filtering.
# Get largest rooms by size in database
Returns the 10 largest rooms and an estimate of how much space in the database
they are taking.
This does not include the size of any media associated with the room.
Returns an error on SQLite.
*Note:* This uses the planner statistics from PostgreSQL to do the estimates,
which means that the returned information can vary widely from reality. However,
it should be enough to get a rough idea of where database disk space is going.
The API is:
```
GET /_synapse/admin/v1/statistics/database/rooms
```
A response body like the following is returned:
```json
{
"rooms": [
{
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
"estimated_size": 47325417353
}
]
}
```
**Response**
The following fields are returned in the JSON response body:
* `rooms` - An array of objects, sorted by largest room first. Objects contain
the following fields:
- `room_id` - string - The room ID.
- `estimated_size` - integer - Estimated disk space used in bytes by the room
in the database.
*Added in Synapse 1.83.0*
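A minimal sketch of reading the estimates (server and token are placeholders; recall the endpoint returns an error on SQLite):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/statistics/database/rooms",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()
for room in resp.json()["rooms"]:
    # estimated_size is in bytes; convert to GiB for readability.
    print(room["room_id"], f'{room["estimated_size"] / 2**30:.1f} GiB')
```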
View File
@ -1,7 +1,7 @@
# User Admin API # User Admin API
To use it, you will need to authenticate by providing an `access_token` To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/). for a server admin: see [Admin API](../usage/administration/admin_api).
## Query User Account ## Query User Account
@ -37,7 +37,6 @@ It returns a JSON body like the following:
"is_guest": 0, "is_guest": 0,
"admin": 0, "admin": 0,
"deactivated": 0, "deactivated": 0,
"erased": false,
"shadow_banned": 0, "shadow_banned": 0,
"creation_ts": 1560432506, "creation_ts": 1560432506,
"appservice_id": null, "appservice_id": null,
@ -54,8 +53,7 @@ It returns a JSON body like the following:
"external_id": "<user_id_provider_2>" "external_id": "<user_id_provider_2>"
} }
], ],
"user_type": null, "user_type": null
"locked": false
} }
``` ```
@ -63,7 +61,7 @@ URL parameters:
- `user_id`: fully-qualified user id: for example, `@user:server.com`. - `user_id`: fully-qualified user id: for example, `@user:server.com`.
## Create or modify account ## Create or modify Account
This API allows an administrator to create or modify a user account with a This API allows an administrator to create or modify a user account with a
specific `user_id`. specific `user_id`.
@ -79,33 +77,31 @@ with a body of:
```json ```json
{ {
"password": "user_password", "password": "user_password",
"logout_devices": false, "displayname": "User",
"displayname": "Alice Marigold",
"avatar_url": "mxc://example.com/abcde12345",
"threepids": [ "threepids": [
{ {
"medium": "email", "medium": "email",
"address": "alice@example.com" "address": "<user_mail_1>"
}, },
{ {
"medium": "email", "medium": "email",
"address": "alice@domain.org" "address": "<user_mail_2>"
} }
], ],
"external_ids": [ "external_ids": [
{ {
"auth_provider": "example", "auth_provider": "<provider1>",
"external_id": "12345" "external_id": "<user_id_provider_1>"
}, },
{ {
"auth_provider": "example2", "auth_provider": "<provider2>",
"external_id": "abc54321" "external_id": "<user_id_provider_2>"
} }
], ],
"avatar_url": "<avatar_url>",
"admin": false, "admin": false,
"deactivated": false, "deactivated": false,
"user_type": null, "user_type": null
"locked": false
} }
``` ```
@ -115,52 +111,41 @@ Returns HTTP status code:
URL parameters: URL parameters:
- `user_id` - A fully-qualified user id. For example, `@user:server.com`. - `user_id`: fully-qualified user id: for example, `@user:server.com`.
Body parameters: Body parameters:
*As documented on `develop`:*

- `password` - **string**, optional. If provided, the user's password is updated and all
  devices are logged out, unless `logout_devices` is set to `false`.
- `logout_devices` - **bool**, optional, defaults to `true`. If set to `false`, devices aren't
  logged out even when `password` is provided.
- `displayname` - **string**, optional. If set to an empty string (`""`), the user's display name
  will be removed.
- `avatar_url` - **string**, optional. Must be a
  [MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
  If set to an empty string (`""`), the user's avatar is removed.
- `threepids` - **array**, optional. If provided, the user's third-party IDs (email, msisdn) are
  entirely replaced with the given list. Each item in the array is an object with the following
  fields:
  - `medium` - **string**, required. The type of third-party ID, either `email` or `msisdn` (phone number).
  - `address` - **string**, required. The third-party ID itself, e.g. `alice@example.com` for `email` or
    `447470274584` (for a phone number with country code "44") and `19254857364` (for a phone number
    with country code "1") for `msisdn`.

  Note: If a threepid is removed from a user via this option, Synapse will also attempt to remove
  that threepid from any identity servers it is aware has a binding for it.
- `external_ids` - **array**, optional. Allow setting the identifier of the external identity
  provider for SSO (Single sign-on). More details are in the configuration manual under the
  sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
  - `auth_provider` - **string**, required. The unique, internal ID of the external identity provider.
    The same as `idp_id` from the homeserver configuration. Note that no error is raised if the
    provided value is not in the homeserver configuration.
  - `external_id` - **string**, required. An identifier for the user in the external identity provider.
    When the user logs in to the identity provider, this must be the unique ID that they map to.
- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
  granting them access to the Admin API, among other things.
- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.

  Note: the `password` field must also be set if both of the following are true:
  - `deactivated` is set to `false` and the user was previously deactivated (you are reactivating this user)
  - Users are allowed to set their password on this homeserver (both `password_config.enabled` and
    `password_config.localdb_enabled` config options are set to `true`).

  Users' passwords are wiped upon account deactivation, hence the need to set a new one here.

  Note: a user cannot be erased with this API. For more details on
  deactivating and erasing users see [Deactivate Account](#deactivate-account).
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
- `user_type` - **string** or null, optional. If not provided, the user type will
  not be changed. If `null` is given, the user type will be cleared.
  Other allowed options are: `bot` and `support`.

*As documented in `v1.69.0rc1`:*

- `password` - string, optional. If provided, the user's password is updated and all
  devices are logged out, unless `logout_devices` is set to `false`.
- `logout_devices` - bool, optional, defaults to `true`. If set to false, devices aren't
  logged out even when `password` is provided.
- `displayname` - string, optional, defaults to the value of `user_id`.
- `threepids` - array, optional, allows setting the third-party IDs (email, msisdn)
  belonging to a user.
  - `medium` - string. Kind of third-party ID, either `email` or `msisdn`.
  - `address` - string. Value of third-party ID.
- `external_ids` - array, optional. Allow setting the identifier of the external identity
  provider for SSO (Single sign-on). Details in the configuration manual under the
  sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
  - `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
    in the homeserver configuration. Note that no error is raised if the provided
    value is not in the homeserver configuration.
  - `external_id` - string, user ID in the external identity provider.
- `avatar_url` - string, optional, must be a
  [MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
- `admin` - bool, optional, defaults to `false`.
- `deactivated` - bool, optional. If unspecified, deactivation state will be left
  unchanged on existing accounts and set to `false` for new accounts.

  A user cannot be erased by deactivating with this API. For details on
  deactivating users see [Deactivate Account](#deactivate-account).
- `user_type` - string or null, optional. If provided, the user type will be
  adjusted. If `null` given, the user type will be cleared. Other
  allowed options are: `bot` and `support`.

If the user already exists then optional parameters default to the current value.

In order to re-activate an account `deactivated` must be set to `false`. If
users do not login via single-sign-on, a new `password` must be provided.
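For illustration, a minimal create-or-modify sketch. The endpoint path `PUT /_synapse/admin/v2/users/<user_id>` is taken from the surrounding documentation rather than this excerpt, and the server, token, and body values are placeholders:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

user_id = "@alice:example.com"  # placeholder
body = {
    "password": "correct horse battery staple",  # placeholder
    "displayname": "Alice Marigold",
    "admin": False,
}

# PUT creates the account if it does not exist, otherwise modifies it.
resp = requests.put(
    f"{HOMESERVER}/_synapse/admin/v2/users/{user_id}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json=body,
)
resp.raise_for_status()
print(resp.status_code)  # 201 on create, 200 on modify
```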
## List Accounts ## List Accounts
@ -182,24 +167,20 @@ A response body like the following is returned:
"admin": 0, "admin": 0,
"user_type": null, "user_type": null,
"deactivated": 0, "deactivated": 0,
"erased": false,
"shadow_banned": 0, "shadow_banned": 0,
"displayname": "<User One>", "displayname": "<User One>",
"avatar_url": null, "avatar_url": null,
"creation_ts": 1560432668000, "creation_ts": 1560432668000
"locked": false
}, { }, {
"name": "<user_id2>", "name": "<user_id2>",
"is_guest": 0, "is_guest": 0,
"admin": 1, "admin": 1,
"user_type": null, "user_type": null,
"deactivated": 0, "deactivated": 0,
"erased": false,
"shadow_banned": 0, "shadow_banned": 0,
"displayname": "<User Two>", "displayname": "<User Two>",
"avatar_url": "<avatar_url>", "avatar_url": "<avatar_url>",
"creation_ts": 1561550621000, "creation_ts": 1561550621000
"locked": false
} }
], ],
"next_token": "100", "next_token": "100",
@ -222,9 +203,7 @@ The following parameters should be set in the URL:
- `name` - Is optional and filters to only return users with user ID localparts - `name` - Is optional and filters to only return users with user ID localparts
**or** displaynames that contain this value. **or** displaynames that contain this value.
- `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users. - `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users.
Defaults to `true` to include guest users. This parameter is not supported when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582) Defaults to `true` to include guest users.
- `admins` - Optional flag to filter admins. If `true`, only admins are queried. If `false`, admins are excluded from
the query. When the flag is absent (the default), **both** admins and non-admins are included in the search results.
- `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users. - `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users.
Defaults to `false` to exclude deactivated users. Defaults to `false` to exclude deactivated users.
- `limit` - string representing a positive integer - Is optional but is used for pagination, - `limit` - string representing a positive integer - Is optional but is used for pagination,
@ -246,15 +225,9 @@ The following parameters should be set in the URL:
- `displayname` - Users are ordered alphabetically by `displayname`. - `displayname` - Users are ordered alphabetically by `displayname`.
- `avatar_url` - Users are ordered alphabetically by avatar URL. - `avatar_url` - Users are ordered alphabetically by avatar URL.
  - `creation_ts` - Users are ordered by when the user was created in ms. - `creation_ts` - Users are ordered by when the user was created in ms.
  - `last_seen_ts` - Users are ordered by when the user was last seen in ms.
- `dir` - Direction of media order. Either `f` for forwards or `b` for backwards. - `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
Setting this value to `b` will reverse the above sort order. Defaults to `f`. Setting this value to `b` will reverse the above sort order. Defaults to `f`.
- `not_user_type` - Exclude certain user types, such as bot users, from the request.
Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
"empty string" here means to exclude users without a type.
- `locked` - string representing a bool - Is optional and if `true` will **include** locked users.
Defaults to `false` to exclude locked users. Note: Introduced in v1.93.
Caution. The database only has indexes on the columns `name` and `creation_ts`. Caution. The database only has indexes on the columns `name` and `creation_ts`.
This means that if a different sort order is used (`is_guest`, `admin`, This means that if a different sort order is used (`is_guest`, `admin`,
@ -274,17 +247,14 @@ The following fields are returned in the JSON response body:
- `user_type` - string - Type of the user. Normal users are type `None`. - `user_type` - string - Type of the user. Normal users are type `None`.
This allows user type specific behaviour. There are also types `support` and `bot`. This allows user type specific behaviour. There are also types `support` and `bot`.
- `deactivated` - bool - Status if that user has been marked as deactivated. - `deactivated` - bool - Status if that user has been marked as deactivated.
- `erased` - bool - Status if that user has been marked as erased.
- `shadow_banned` - bool - Status if that user has been marked as shadow banned. - `shadow_banned` - bool - Status if that user has been marked as shadow banned.
- `displayname` - string - The user's display name if they have set one. - `displayname` - string - The user's display name if they have set one.
- `avatar_url` - string - The user's avatar URL if they have set one. - `avatar_url` - string - The user's avatar URL if they have set one.
- `creation_ts` - integer - The user's creation timestamp in ms. - `creation_ts` - integer - The user's creation timestamp in ms.
- `last_seen_ts` - integer - The user's last activity timestamp in ms.
- `locked` - bool - Status if that user has been marked as locked. Note: Introduced in v1.93.
- `next_token`: string representing a positive integer - Indication for pagination. See above. - `next_token`: string representing a positive integer - Indication for pagination. See above.
- `total` - integer - Total number of media. - `total` - integer - Total number of media.
*Added in Synapse 1.93:* the `locked` query parameter and response field.
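A minimal pagination sketch using `next_token` as described above. The `GET /_synapse/admin/v2/users` path is from the surrounding documentation rather than this excerpt; the server and token are placeholders:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder
HEADERS = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

users = []
params = {"limit": "100"}
while True:
    resp = requests.get(
        f"{HOMESERVER}/_synapse/admin/v2/users", headers=HEADERS, params=params
    )
    resp.raise_for_status()
    data = resp.json()
    users.extend(data["users"])
    # next_token is only present while there are more results to fetch.
    if "next_token" not in data:
        break
    params["from"] = data["next_token"]

print(f"{len(users)} users total")
```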
## Query current sessions for a user ## Query current sessions for a user
@ -399,8 +369,6 @@ The following actions are **NOT** performed. The list may be incomplete.
## Reset password ## Reset password
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Changes the password of another user. This will automatically log the user out of all their devices. Changes the password of another user. This will automatically log the user out of all their devices.
The api is: The api is:
@ -424,8 +392,6 @@ The parameter `logout_devices` is optional and defaults to `true`.
## Get whether a user is a server administrator or not ## Get whether a user is a server administrator or not
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
The api is: The api is:
``` ```
@ -443,8 +409,6 @@ A response body like the following is returned:
## Change whether a user is a server administrator or not ## Change whether a user is a server administrator or not
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Note that you cannot demote yourself. Note that you cannot demote yourself.
The api is: The api is:
@ -618,16 +582,6 @@ A response body like the following is returned:
"quarantined_by": null, "quarantined_by": null,
"safe_from_quarantine": false, "safe_from_quarantine": false,
"upload_name": "test2.png" "upload_name": "test2.png"
},
{
"created_ts": 300400,
"last_access_ts": 300700,
"media_id": "BzYNLRUgGHphBkdKGbzXwbjX",
"media_length": 1337,
"media_type": "application/octet-stream",
"quarantined_by": null,
"safe_from_quarantine": false,
"upload_name": null
} }
], ],
"next_token": 3, "next_token": 3,
@ -689,17 +643,16 @@ The following fields are returned in the JSON response body:
- `media` - An array of objects, each containing information about a media. - `media` - An array of objects, each containing information about a media.
Media objects contain the following fields: Media objects contain the following fields:
- `created_ts` - integer - Timestamp when the content was uploaded in ms. - `created_ts` - integer - Timestamp when the content was uploaded in ms.
- `last_access_ts` - integer or null - Timestamp when the content was last accessed in ms. - `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
Null if there was no access, yet.
- `media_id` - string - The id used to refer to the media. Details about the format - `media_id` - string - The id used to refer to the media. Details about the format
are documented under are documented under
[media repository](../media_repository.md). [media repository](../media_repository.md).
- `media_length` - integer - Length of the media in bytes. - `media_length` - integer - Length of the media in bytes.
- `media_type` - string - The MIME-type of the media. - `media_type` - string - The MIME-type of the media.
- `quarantined_by` - string or null - The user ID that initiated the quarantine request - `quarantined_by` - string - The user ID that initiated the quarantine request
for this media. Null if not quarantined. for this media.
- `safe_from_quarantine` - bool - Status if this media is safe from quarantining. - `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
- `upload_name` - string or null - The name the media was uploaded with. Null if not provided during upload. - `upload_name` - string - The name the media was uploaded with.
- `next_token`: integer - Indication for pagination. See above. - `next_token`: integer - Indication for pagination. See above.
- `total` - integer - Total number of media. - `total` - integer - Total number of media.
@ -749,8 +702,6 @@ delete largest/smallest or newest/oldest files first.
## Login as a user ## Login as a user
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Get an access token that can be used to authenticate as that user. Useful for Get an access token that can be used to authenticate as that user. Useful for
when admins wish to do actions on behalf of a user. when admins wish to do actions on behalf of a user.
@ -763,8 +714,7 @@ POST /_synapse/admin/v1/users/<user_id>/login
An optional `valid_until_ms` field can be specified in the request body as an An optional `valid_until_ms` field can be specified in the request body as an
integer timestamp that specifies when the token should expire. By default tokens integer timestamp that specifies when the token should expire. By default tokens
do not expire. Note that this API does not allow a user to login as themselves do not expire.
(to create more tokens).
A response body like the following is returned: A response body like the following is returned:
@ -784,43 +734,6 @@ Note: The token will expire if the *admin* user calls `/logout/all` from any
of their devices, but the token will *not* expire if the target user does the of their devices, but the token will *not* expire if the target user does the
same. same.
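A minimal sketch that requests a token valid for one hour (server, token, and user ID are placeholders):

```python
import time

import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

user_id = "@alice:example.com"  # placeholder
# valid_until_ms is a timestamp in milliseconds; here, one hour from now.
valid_until_ms = int((time.time() + 3600) * 1000)

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/users/{user_id}/login",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"valid_until_ms": valid_until_ms},
)
resp.raise_for_status()
user_token = resp.json()["access_token"]
```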
## Allow replacing master cross-signing key without User-Interactive Auth
This endpoint is not intended for server administrator usage;
we describe it here for completeness.
This API temporarily permits a user to replace their master cross-signing key
without going through
[user-interactive authentication](https://spec.matrix.org/v1.8/client-server-api/#user-interactive-authentication-api) (UIA).
This is useful when Synapse has delegated its authentication to the
[Matrix Authentication Service](https://github.com/matrix-org/matrix-authentication-service/);
as UIA is not possible in these circumstances.
The API is
```http request
POST /_synapse/admin/v1/users/<user_id>/_allow_cross_signing_replacement_without_uia
{}
```
If the user does not exist, or does exist but has no master cross-signing key,
this will return with status code `404 Not Found`.
Otherwise, a response body like the following is returned, with status `200 OK`:
```json
{
"updatable_without_uia_before_ms": 1234567890
}
```
The response body is a JSON object with a single field:
- `updatable_without_uia_before_ms`: integer. The timestamp in milliseconds
before which the user is permitted to replace their cross-signing key without
going through UIA.
_Added in Synapse 1.97.0._
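A minimal sketch of the call and its two documented outcomes (server, token, and user ID are placeholders):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

user_id = "@alice:example.com"  # placeholder
resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/users/{user_id}"
    "/_allow_cross_signing_replacement_without_uia",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},
)
if resp.status_code == 404:
    # User does not exist, or has no master cross-signing key.
    print("404 Not Found")
else:
    resp.raise_for_status()
    print("UIA waived until (ms):", resp.json()["updatable_without_uia_before_ms"])
```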
## User devices ## User devices
@ -885,33 +798,6 @@ The following fields are returned in the JSON response body:
- `total` - Total number of user's devices. - `total` - Total number of user's devices.
### Create a device
Creates a new device for a specific `user_id` and `device_id`. Does nothing if the `device_id`
exists already.
The API is:
```
POST /_synapse/admin/v2/users/<user_id>/devices
{
"device_id": "QBUAZIFURK"
}
```
An empty JSON dict is returned.
**Parameters**
The following parameters should be set in the URL:
- `user_id` - fully qualified: for example, `@user:server.com`.
The following fields are required in the JSON request body:
- `device_id` - The device ID to create.
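A minimal sketch (server, token, and user ID are placeholders; the `device_id` is the example value above):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

user_id = "@alice:example.com"  # placeholder
resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v2/users/{user_id}/devices",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"device_id": "QBUAZIFURK"},
)
resp.raise_for_status()  # an empty JSON dict is returned on success
```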
### Delete multiple devices ### Delete multiple devices
Deletes the given devices for a specific `user_id`, and invalidates Deletes the given devices for a specific `user_id`, and invalidates
any access token associated with them. any access token associated with them.
@ -1252,7 +1138,7 @@ The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must - `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
be local. be local.
## Check username availability ### Check username availability
Checks to see if a username is available, and valid, for the server. See [the client-server Checks to see if a username is available, and valid, for the server. See [the client-server
API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)
@ -1270,7 +1156,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart
The request and response format is the same as the The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API. [/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
## Find a user based on their ID in an auth provider ### Find a user based on their ID in an auth provider
The API is: The API is:
@ -1307,42 +1193,3 @@ Returns a `404` HTTP status code if no user was found, with a response body like
``` ```
_Added in Synapse 1.68.0._ _Added in Synapse 1.68.0._
## Find a user based on their Third Party ID (ThreePID or 3PID)
The API is:
```
GET /_synapse/admin/v1/threepid/$medium/users/$address
```
When a user matched the given address for the given medium, an HTTP code `200` with a response body like the following is returned:
```json
{
"user_id": "@hello:example.org"
}
```
**Parameters**
The following parameters should be set in the URL:
- `medium` - Kind of third-party ID, either `email` or `msisdn`.
- `address` - Value of the third-party ID.
The `address` may have characters that are not URL-safe, so it is advised to URL-encode those parameters.
**Errors**
Returns a `404` HTTP status code if no user was found, with a response body like this:
```json
{
"errcode":"M_NOT_FOUND",
"error":"User not found"
}
```
_Added in Synapse 1.72.0._
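A minimal sketch that URL-encodes the address as advised above (server, token, and 3PID are placeholders):

```python
from urllib.parse import quote

import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder

medium = "email"  # placeholder
address = "hello@example.org"  # placeholder

# The address may contain characters that are not URL-safe, so encode it.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/threepid/{medium}/users/{quote(address)}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
if resp.status_code == 404:
    print("no user bound to that 3PID")
else:
    resp.raise_for_status()
    print(resp.json()["user_id"])
```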
View File
@ -1,7 +1,7 @@
# Version API # Version API
This API returns the running Synapse version. This API returns the running Synapse version and the Python version
This is useful when a Synapse instance on which Synapse is being run. This is useful when a Synapse instance
is behind a proxy that does not forward the 'Server' header (which also is behind a proxy that does not forward the 'Server' header (which also
contains Synapse version information). contains Synapse version information).
@ -15,9 +15,7 @@ It returns a JSON body like the following:
```json ```json
{ {
"server_version": "0.99.2rc1 (b=develop, abcdef123)" "server_version": "0.99.2rc1 (b=develop, abcdef123)",
"python_version": "3.7.8"
} }
``` ```
*Changed in Synapse 1.94.0:* The `python_version` key was removed from the
response body.
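A minimal sketch; the `GET /_synapse/admin/v1/server_version` path comes from the wider Synapse docs rather than this excerpt, and the server is a placeholder:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder

# Historically this endpoint has not required authentication; check the
# docs for the version you are running.
resp = requests.get(f"{HOMESERVER}/_synapse/admin/v1/server_version")
resp.raise_for_status()
print(resp.json()["server_version"])
```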
View File
@ -24,7 +24,7 @@ Server with a domain specific API.
1. **Messaging Layer** 1. **Messaging Layer**
This is what the rest of the homeserver hits to send messages, join rooms, This is what the rest of the homeserver hits to send messages, join rooms,
etc. It also allows you to register callbacks for when it gets notified by etc. It also allows you to register callbacks for when it get's notified by
lower levels that e.g. a new message has been received. lower levels that e.g. a new message has been received.
It is responsible for serializing requests to send to the data It is responsible for serializing requests to send to the data
View File
@ -15,7 +15,6 @@ app_service_config_files:
The format of the AS configuration file is as follows: The format of the AS configuration file is as follows:
```yaml ```yaml
id: <your-AS-id>
url: <base url of AS> url: <base url of AS>
as_token: <token AS will add to requests to HS> as_token: <token AS will add to requests to HS>
hs_token: <token HS will add to requests to AS> hs_token: <token HS will add to requests to AS>
View File
@ -164,7 +164,7 @@ Synapse 1.6.0rc2 (2019-11-25)
Bugfixes Bugfixes
-------- --------
- Fix a bug which could cause the background database update handler for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407)) - Fix a bug which could cause the background database update hander for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))
Synapse 1.6.0rc1 (2019-11-20) Synapse 1.6.0rc1 (2019-11-20)
@ -191,7 +191,7 @@ Bugfixes
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306)) - Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307)) - Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313)) - Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
- Fix bug which caused rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320)) - Fix bug which casued rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335)) - Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338)) - Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359)) - Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
@ -232,7 +232,7 @@ Internal Changes
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305)) - Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308)) - Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312)) - Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
- Add optional python dependencies and dependent binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317)) - Add optional python dependencies and dependant binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336)) - Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319)) - Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330)) - Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
@ -653,7 +653,7 @@ Internal Changes
- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810)) - Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826)) - Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836)) - Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
- Whitelist history visibility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843)) - Whitelist history visbility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
Synapse 1.2.1 (2019-07-26) Synapse 1.2.1 (2019-07-26)
@ -817,7 +817,7 @@ See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
Features Features
-------- --------
- Added possibility to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092)) - Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252)) - Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363)) - Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461)) - Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
@ -850,7 +850,7 @@ Bugfixes
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507)) - Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514)) - Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523)) - Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
- Fixed m.login.jwt using unregistered user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586)) - Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576)) - Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
View File
@ -251,7 +251,7 @@ Internal Changes
- Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559)) - Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559))
- Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595)) - Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595))
- Don't instantiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614)) - Don't instansiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
- Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615)) - Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615))
- Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616)) - Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616))
- Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621)) - Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621))
@ -518,7 +518,7 @@ Bugfixes
- Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278)) - Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278))
- Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287)) - Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287))
- Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324)) - Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))
- Fix `UnboundLocalError` from occurring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329)) - Fix `UnboundLocalError` from occuring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
- Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353)) - Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353))
- Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362)) - Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362))
- Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364)) - Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364))
@ -815,7 +815,7 @@ Bugfixes
- Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977)) - Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977))
- Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978)) - Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978))
- Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980)) - Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980))
- Fix various comments and minor discrepancies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996)) - Fix various comments and minor discrepencies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
- Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999)) - Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999))
- Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012)) - Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012))
@ -1460,7 +1460,7 @@ Bugfixes
- Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946)) - Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946))
- Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455)) - Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455))
- Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089)) - Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089))
- Return the proper error (`M_BAD_ALIAS`) when a non-existent canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109)) - Return the proper error (`M_BAD_ALIAS`) when a non-existant canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
- Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117)) - Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117))
- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133)) - Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133))
- Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150)) - Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150))
@@ -1482,7 +1482,7 @@ Bugfixes
- Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
- Fix incorrect metrics reporting for the `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
- Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
- Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
- Fix a bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
@@ -1638,7 +1638,7 @@ Security advisory
-----------------

Synapse may be vulnerable to request-smuggling attacks when it is used with a
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
described in
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
and
@@ -1748,7 +1748,7 @@ Internal Changes
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
- Reduce time spent doing GC by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
@@ -1809,7 +1809,7 @@ Bugfixes
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
- Fix Synapse refusing to start if the `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
- Return a 404 instead of 200 when querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))
Updates to the Docker image
@@ -1889,7 +1889,7 @@ Bugfixes
Synapse 1.10.0rc4 (2020-02-11)
==============================

This release candidate was built incorrectly and is superseded by 1.10.0rc5.

Synapse 1.10.0rc3 (2020-02-10)
==============================
@@ -2270,7 +2270,7 @@ Features
Bugfixes
--------

- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
@@ -2522,7 +2522,7 @@ Bugfixes
- Fix a long-standing bug where an `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
- Fix a small bug in the v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))

Improved Documentation
@@ -10,17 +10,26 @@ The necessary tools are:
- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
See [the contributing guide](development/contributing_guide.md#run-the-linters) for instructions
on how to install the above tools and run the linters.
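If a quick reference is helpful, an earlier revision of this page suggested installing and invoking the linters with the commands below; this is a sketch that assumes the `lint` and `mypy` extras and the `scripts-dev/lint.sh` script exist in your checkout.

```sh
# Install the linting tools as optional extras of the project
# (per an earlier revision of this page).
pip install -e ".[lint,mypy]"

# Run all of the linters over the codebase in one pass.
scripts-dev/lint.sh
```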
It's worth noting that modern IDEs and text editors can run these tools
automatically on save. It may be worth looking into whether this
functionality is supported in your editor for a more convenient
development workflow. It is not, however, recommended to run `mypy`
on save as it takes a while and can be very resource intensive.
## General rules