diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 706eacbe90..bcd65b2f60 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -31,7 +31,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Extract version from pyproject.toml # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 51ecce0c05..af7c3882ea 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -19,7 +19,7 @@ jobs: name: GitHub Pages runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Fetch all history so that the schema_versions script works. fetch-depth: 0 @@ -56,7 +56,7 @@ jobs: name: Check links in documentation runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Setup mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 2c6891fa1a..9ff6b75109 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -54,7 +54,7 @@ jobs: - pre steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Fetch all history so that the schema_versions script works. 
fetch-depth: 0 diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index a0a37f2a67..babc3bc5de 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -18,7 +18,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 9908633f8e..9e0f2c384e 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -42,7 +42,7 @@ jobs: if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: @@ -77,7 +77,7 @@ jobs: postgres-version: "14" steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -152,7 +152,7 @@ jobs: BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -202,14 +202,14 @@ jobs: steps: - name: Check out synapse codebase - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -234,7 +234,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml index c79343787d..35691806e8 100644 --- a/.github/workflows/poetry_lockfile.yaml +++ b/.github/workflows/poetry_lockfile.yaml @@ -19,7 +19,7 @@ jobs: name: "Check locked dependencies have sdists" runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" diff --git a/.github/workflows/push_complement_image.yml b/.github/workflows/push_complement_image.yml index b662b98754..e6d0894e83 100644 --- a/.github/workflows/push_complement_image.yml +++ b/.github/workflows/push_complement_image.yml @@ -33,17 +33,17 @@ jobs: packages: write steps: - name: Checkout 
specific branch (debug build) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'workflow_dispatch' with: ref: ${{ inputs.branch }} - name: Checkout clean copy of develop (scheduled build) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'schedule' with: ref: develop - name: Checkout clean copy of master (on-push) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'push' with: ref: master diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 139d02866f..5f5b64dc64 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -27,7 +27,7 @@ jobs: name: "Calculate list of debian distros" runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -55,7 +55,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: src @@ -123,7 +123,7 @@ jobs: os: "ubuntu-24.04-arm" steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: @@ -161,7 +161,7 @@ jobs: if: ${{ !startsWith(github.ref, 'refs/pull/') }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.10" diff --git a/.github/workflows/schema.yaml b/.github/workflows/schema.yaml index 3f7c27f582..7509e51c41 100644 --- a/.github/workflows/schema.yaml +++ b/.github/workflows/schema.yaml @@ -18,7 +18,7 @@ jobs: name: Ensure Synapse config schema is valid runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -44,7 +44,7 @@ jobs: name: Ensure generated documentation is up-to-date runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f93c25c01f..715dfa93d9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -86,7 +86,7 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: 
dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: @@ -106,7 +106,7 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -116,7 +116,7 @@ jobs: check-lockfile: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -129,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Setup Poetry uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 @@ -151,7 +151,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -187,7 +187,7 @@ jobs: lint-crlf: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Check line endings run: scripts-dev/check_line_terminators.sh @@ -196,7 +196,7 @@ jobs: if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 @@ -214,7 +214,7 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -233,12 +233,12 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: - toolchain: nightly-2025-04-23 + toolchain: nightly-2026-02-01 components: clippy - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 @@ -251,7 +251,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -287,7 +287,7 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: 
dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -307,7 +307,7 @@ jobs: needs: changes if: ${{ needs.changes.outputs.linting_readme == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -354,7 +354,7 @@ jobs: needs: linting-done runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -375,7 +375,7 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 - name: Set up PostgreSQL ${{ matrix.job.postgres-version }} if: ${{ matrix.job.postgres-version }} @@ -431,7 +431,7 @@ jobs: - changes runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -494,7 +494,7 @@ jobs: extras: ["all"] steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 # Install libs necessary for PyPy to build binary wheels for dependencies - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 @@ -544,7 +544,7 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Prepare test blacklist run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers @@ -591,7 +591,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 postgresql-client - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: @@ -634,7 +634,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Add PostgreSQL apt repository # We need a version of pg_dump that can handle the version of # PostgreSQL being tested against. 
The Ubuntu package repository lags @@ -689,7 +689,7 @@ jobs: steps: - name: Checkout synapse codebase - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse @@ -702,7 +702,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -739,7 +739,7 @@ jobs: - changes steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -759,7 +759,7 @@ jobs: - changes steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml index 27ff1d80cd..31dddab012 100644 --- a/.github/workflows/triage_labelled.yml +++ b/.github/workflows/triage_labelled.yml @@ -22,7 +22,7 @@ jobs: # This field is case-sensitive. TARGET_STATUS: Needs info steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Only clone the script file we care about, instead of the whole repo. sparse-checkout: .ci/scripts/triage_labelled_issue.sh diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 2433632a7f..bd5c79f16d 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -43,7 +43,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -70,7 +70,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 - name: Install Rust @@ -117,7 +117,7 @@ jobs: - ${{ github.workspace }}:/src steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -175,14 +175,14 @@ jobs: steps: - name: Run actions/checkout@v4 for synapse - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -217,7 +217,7 @@ jobs: runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/CHANGES.md b/CHANGES.md index d07bcfaba5..67a8812267 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,36 @@ +# Synapse 1.147.1 (2026-02-12) + +## Internal Changes + +- Block federation requests and events authenticated using a known insecure signing key. See [CVE-2026-24044](https://www.cve.org/CVERecord?id=CVE-2026-24044) / [ELEMENTSEC-2025-1670](https://github.com/element-hq/ess-helm/security/advisories/GHSA-qwcj-h6m8-vp6q). ([\#19459](https://github.com/element-hq/synapse/issues/19459)) + + + + +# Synapse 1.147.0 (2026-02-10) + +No significant changes since 1.147.0rc1. + +# Synapse 1.147.0rc1 (2026-02-03) + +## Bugfixes + +- Fix memory leak caused by not cleaning up stopped looping calls. Introduced in v1.140.0. ([\#19416](https://github.com/element-hq/synapse/issues/19416)) +- Fix a typo that incorrectly made `setuptools_rust` a runtime dependency. ([\#19417](https://github.com/element-hq/synapse/issues/19417)) + +## Internal Changes + +- Prune stale entries from `sliding_sync_connection_required_state` table. ([\#19306](https://github.com/element-hq/synapse/issues/19306)) +- Update "Event Send Time Quantiles" graph to only use dots for the event persistence rate (Grafana dashboard). ([\#19399](https://github.com/element-hq/synapse/issues/19399)) +- Update and align Grafana dashboard to use regex matching for `job` selectors (`job=~"$job"`) so the "all" value works correctly across all panels. ([\#19400](https://github.com/element-hq/synapse/issues/19400)) +- Don't retry joining partial state rooms all at once on startup. ([\#19402](https://github.com/element-hq/synapse/issues/19402)) +- Disallow requests to the health endpoint from containing trailing path characters. ([\#19405](https://github.com/element-hq/synapse/issues/19405)) +- Add notes that new experimental features should have associated tracking issues. ([\#19410](https://github.com/element-hq/synapse/issues/19410)) +- Bump `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0. Contributed by @razvp @ ERCOM. ([\#19412](https://github.com/element-hq/synapse/issues/19412)) + + + + # Synapse 1.146.0 (2026-01-27) No significant changes since 1.146.0rc1. 
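The CVE-2026-24044 mitigation described in the 1.147.1 changelog entry is, at its core, a deny-list consulted during signature verification: a verify key is encoded to unpadded base64 and refused if it appears in `BANNED_SERVER_SIGNING_KEYS`. A minimal sketch of that check, assuming `signedjson` is installed (the standalone `is_banned` helper is hypothetical; Synapse inlines the check in `Keyring.process_request`, shown in the `synapse/crypto/keyring.py` hunk later in this diff):

```python
from signedjson.key import (
    encode_verify_key_base64,
    generate_signing_key,
    get_verify_key,
)

# Deny-list of unpadded-base64 server signing keys; the value below is the
# one added in the keyring.py hunk in this diff (ELEMENTSEC-2025-1670).
BANNED_SERVER_SIGNING_KEYS = (
    "l/O9hxMVKB6Lg+3Hqf0FQQZhVESQcMzbPN1Cz2nM3og=",
)


def is_banned(verify_key) -> bool:
    """Hypothetical helper: True iff the key's unpadded-base64 encoding is
    on the deny-list. Synapse raises a 401 SynapseError on a hit, before
    any signature is checked against the key."""
    return encode_verify_key_base64(verify_key) in BANNED_SERVER_SIGNING_KEYS


# Example: a freshly generated key is (with overwhelming probability) not banned.
assert not is_banned(get_verify_key(generate_signing_key("1")))
```

Because the check runs before `process_json`, a banned key can neither authenticate inbound federation requests nor validate event signatures.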
diff --git a/Cargo.lock b/Cargo.lock index e5ce9325df..8d1cd967d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -813,9 +813,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" +checksum = "ab53c047fcd1a1d2a8820fe84f05d6be69e9526be40cb03b73f86b6b03e6d87d" dependencies = [ "anyhow", "indoc", @@ -831,18 +831,18 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" +checksum = "b455933107de8642b4487ed26d912c2d899dec6114884214a0b3bb3be9261ea6" dependencies = [ "target-lexicon", ] [[package]] name = "pyo3-ffi" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" +checksum = "1c85c9cbfaddf651b1221594209aed57e9e5cff63c4d11d1feead529b872a089" dependencies = [ "libc", "pyo3-build-config", @@ -861,9 +861,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" +checksum = "0a5b10c9bf9888125d917fb4d2ca2d25c8df94c7ab5a52e13313a07e050a3b02" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -873,9 +873,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" +checksum = "03b51720d314836e53327f5871d4c0cfb4fb37cc2c4a11cc71907a86342c40f9" dependencies = [ "heck", "proc-macro2", @@ -886,9 +886,9 @@ dependencies = [ [[package]] name = "pythonize" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2" +checksum = "a3a8f29db331e28c332c63496cfcbb822aca3d7320bc08b655d7fd0c29c50ede" dependencies = [ "pyo3", "serde", @@ -1207,15 +1207,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -1921,3 +1921,9 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zmij" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff05f8caa9038894637571ae6b9e29466c1f4f829d26c9b28f869a29cbe3445" diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index 38f70db28f..21be6ebb86 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -414,6 +414,10 @@ { "id": "custom.axisPlacement", "value": "right" + }, + { + "id": "custom.fillOpacity", + "value": 0 } ] }, @@ -441,6 +445,10 @@ { "id": "custom.axisPlacement", "value": "right" + }, + { + "id": "custom.fillOpacity", + "value": 0 } ] } @@ -2266,7 +2274,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": 
"sum(avg_over_time(synapse_http_server_in_flight_requests_count{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(avg_over_time(synapse_http_server_in_flight_requests_count{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "interval": "", "legendFormat": "Total", "refId": "B" @@ -4161,7 +4169,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4204,7 +4212,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4247,7 +4255,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4262,7 +4270,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4304,7 +4312,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4319,7 +4327,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", + 
"expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4361,7 +4369,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4376,7 +4384,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4706,7 +4714,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.99, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.99, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "99%", @@ -4716,7 +4724,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.9, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.9, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "90%", @@ -4726,7 +4734,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.75, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.75, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "75%", @@ -4736,7 +4744,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.5, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.5, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "50%", @@ -6937,7 +6945,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_notifier_users_woken_by_stream_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": 
"rate(synapse_notifier_users_woken_by_stream_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "format": "time_series", "hide": false, "intervalFactor": 2, @@ -7116,7 +7124,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_notified_presence_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_notified_presence_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Notified", "refId": "A" @@ -7125,7 +7133,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_federation_presence_out_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_federation_presence_out_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Remote ping", "refId": "B" @@ -7134,7 +7142,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_presence_updates_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_presence_updates_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Total updates", "refId": "C" @@ -7143,7 +7151,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_federation_presence_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_federation_presence_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Remote updates", "refId": "D" @@ -7152,7 +7160,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_bump_active_time_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_bump_active_time_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Bump active time", "refId": "E" @@ -7187,7 +7195,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_handler_presence_state_transition_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_state_transition_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "{{from}} -> {{to}}", "range": true, @@ -7223,7 +7231,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_handler_presence_notify_reason_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_notify_reason_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "{{reason}}", "range": true, diff --git a/debian/changelog b/debian/changelog index ac013ba1b8..a6852dac5e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,21 @@ +matrix-synapse-py3 (1.147.1) stable; urgency=medium + + * New synapse release 1.147.1. + + -- Synapse Packaging team Thu, 12 Feb 2026 15:45:15 +0000 + +matrix-synapse-py3 (1.147.0) stable; urgency=medium + + * New synapse release 1.147.0. 
+ + -- Synapse Packaging team Tue, 10 Feb 2026 12:39:58 +0000 + +matrix-synapse-py3 (1.147.0~rc1) stable; urgency=medium + + * New Synapse release 1.147.0rc1. + + -- Synapse Packaging team Tue, 03 Feb 2026 08:53:17 -0700 + matrix-synapse-py3 (1.146.0) stable; urgency=medium * New Synapse release 1.146.0. diff --git a/docs/development/experimental_features.md b/docs/development/experimental_features.md index d6b11496cc..5a86017ecf 100644 --- a/docs/development/experimental_features.md +++ b/docs/development/experimental_features.md @@ -35,3 +35,30 @@ but one should be used if unsure. New experimental configuration flags should be added under the `experimental` configuration key (see the `synapse.config.experimental` file) and either explain (briefly) what is being enabled, or include the MSC number. +The configuration flag should link to the tracking issue for the experimental feature (see below). + + +## Tracking issues for experimental features + +In the interest of having some documentation around experimental features, without +polluting the stable documentation, all new experimental features should have a tracking issue with +[the `T-ExperimentalFeature` label](https://github.com/element-hq/synapse/issues?q=sort%3Aupdated-desc+state%3Aopen+label%3A%22T-ExperimentalFeature%22), +kept open as long as the experimental feature is present in Synapse. + +The configuration option for the feature should have a comment linking to the tracking issue, +for ease of discoverability. + +As a guideline, the issue should contain: + +- Context for why this experimental feature is in Synapse + - This could well be a link to somewhere else, where this context is already available. +- If applicable, why the feature is enabled by default. (Why do we need to enable it by default and why is it safe?) +- If applicable, setup instructions for any non-standard components or configuration needed by the feature. + (Ideally this will be moved to the configuration manual after stabilisation.) +- Design decisions behind the Synapse implementation. + (Ideally this will be moved to the developers' documentation after stabilisation.) +- Any caveats around the current implementation of the feature, such as: + - missing aspects + - breakage or incompatibility that is expected if/when the feature is stabilised, + or when the feature is turned on/off +- Criteria for how we know whether we can remove the feature in the future. 
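To make the documented convention concrete, a flag in `synapse.config.experimental` reads its value from the `experimental_features` section of the homeserver config and carries a comment linking its tracking issue. A hedged sketch (the MSC number, flag name, and issue URL are invented placeholders; the `read_config` shape follows the existing `ExperimentalConfig`):

```python
from typing import Any

from synapse.config._base import Config


class ExperimentalConfig(Config):
    """Config section for enabling experimental features."""

    section = "experimental"

    def read_config(self, config: dict[str, Any], **kwargs: Any) -> None:
        experimental = config.get("experimental_features") or {}

        # MSC0000 (hypothetical): placeholder experimental behaviour.
        # Tracking issue (kept open while the feature exists in Synapse):
        # https://github.com/element-hq/synapse/issues/00000
        self.msc0000_enabled: bool = experimental.get("msc0000_enabled", False)
```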
diff --git a/poetry.lock b/poetry.lock index b5f0ae4e55..f47844bded 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2594,14 +2594,14 @@ six = ">=1.5" [[package]] name = "python-multipart" -version = "0.0.20" +version = "0.0.22" description = "A streaming multipart parser for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, - {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, + {file = "python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155"}, + {file = "python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 8080acc3b0..d2d382a596 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "matrix-synapse" -version = "1.146.0" +version = "1.147.1" description = "Homeserver for the Matrix decentralised comms protocol" readme = "README.rst" authors = [ diff --git a/rust/Cargo.toml b/rust/Cargo.toml index e8321d159b..350701d327 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -30,14 +30,14 @@ http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" mime = "0.3.17" -pyo3 = { version = "0.26.0", features = [ +pyo3 = { version = "0.27.2", features = [ "macros", "anyhow", "abi3", "abi3-py310", ] } pyo3-log = "0.13.1" -pythonize = "0.26.0" +pythonize = "0.27.0" regex = "1.6.0" sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } diff --git a/rust/src/http.rs b/rust/src/http.rs index 63ed05be54..8d462f2e5e 100644 --- a/rust/src/http.rs +++ b/rust/src/http.rs @@ -32,7 +32,7 @@ fn read_io_body(body: &Bound<'_, PyAny>, chunk_size: usize) -> PyResult<Bytes> { let mut buf = BytesMut::new(); loop { let bound = &body.call_method1("read", (chunk_size,))?; - let bytes: &Bound<'_, PyBytes> = bound.downcast()?; + let bytes: &Bound<'_, PyBytes> = bound.cast()?; if bytes.as_bytes().is_empty() { return Ok(buf.into()); } @@ -58,12 +58,12 @@ pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult<Request<Bytes>> { let bound = &request.getattr("uri")?; - let uri: &Bound<'_, PyBytes> = bound.downcast()?; + let uri: &Bound<'_, PyBytes> = bound.cast()?; *req.uri_mut() = Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?; let bound = &request.getattr("method")?; - let method: &Bound<'_, PyBytes> = bound.downcast()?; + let method: &Bound<'_, PyBytes> = bound.cast()?; *req.method_mut() = Method::from_bytes(method.as_bytes()) .map_err(|_| PyValueError::new_err("invalid method"))?; @@ -74,17 +74,17 @@ pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult<Request<Bytes>> { - let header: &Bound<'_, PyTuple> = header.downcast()?; + let header: &Bound<'_, PyTuple> = header.cast()?; let bound = &header.get_item(0)?; - let name: &Bound<'_, PyBytes> = bound.downcast()?; + let name: &Bound<'_, PyBytes> = bound.cast()?; let name = HeaderName::from_bytes(name.as_bytes()) .map_err(|_| PyValueError::new_err("invalid header name"))?; let bound = &header.get_item(1)?; - let values: &Bound<'_, PySequence> = bound.downcast()?; + let values: &Bound<'_, PySequence> = bound.cast()?; for index in 0..values.len()?
{ let bound = &values.get_item(index)?; - let value: &Bound<'_, PyBytes> = bound.downcast()?; + let value: &Bound<'_, PyBytes> = bound.cast()?; let value = HeaderValue::from_bytes(value.as_bytes()) .map_err(|_| PyValueError::new_err("invalid header value"))?; req.headers_mut().append(name.clone(), value); diff --git a/rust/src/http_client.rs b/rust/src/http_client.rs index 4bd80c8e04..b1e4f753b8 100644 --- a/rust/src/http_client.rs +++ b/rust/src/http_client.rs @@ -316,5 +316,8 @@ fn make_deferred_yieldable<'py>( func }); - make_deferred_yieldable.call1(py, (deferred,))?.extract(py) + make_deferred_yieldable + .call1(py, (deferred,))? + .extract(py) + .map_err(Into::into) } diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs index b0cedd758c..ac9b9c93e4 100644 --- a/rust/src/push/mod.rs +++ b/rust/src/push/mod.rs @@ -273,14 +273,16 @@ pub enum SimpleJsonValue { Null, } -impl<'source> FromPyObject<'source> for SimpleJsonValue { - fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> { - if let Ok(s) = ob.downcast::<PyString>() { +impl<'source> FromPyObject<'_, 'source> for SimpleJsonValue { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result<Self, Self::Error> { + if let Ok(s) = ob.cast::<PyString>() { Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string()))) // A bool *is* an int, ensure we try bool first. - } else if let Ok(b) = ob.downcast::<PyBool>() { + } else if let Ok(b) = ob.cast::<PyBool>() { Ok(SimpleJsonValue::Bool(b.extract()?)) - } else if let Ok(i) = ob.downcast::<PyInt>() { + } else if let Ok(i) = ob.cast::<PyInt>() { Ok(SimpleJsonValue::Int(i.extract()?)) } else if ob.is_none() { Ok(SimpleJsonValue::Null) @@ -301,12 +303,14 @@ pub enum JsonValue { Value(SimpleJsonValue), } -impl<'source> FromPyObject<'source> for JsonValue { - fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> { - if let Ok(l) = ob.downcast::<PyList>() { +impl<'source> FromPyObject<'_, 'source> for JsonValue { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result<Self, Self::Error> { + if let Ok(l) = ob.cast::<PyList>() { match l .iter() - .map(|it| SimpleJsonValue::extract_bound(&it)) + .map(|it| SimpleJsonValue::extract(it.as_borrowed())) .collect() { Ok(a) => Ok(JsonValue::Array(a)), @@ -314,7 +318,7 @@ impl<'source> FromPyObject<'source> for JsonValue { "Can't convert to JsonValue::Array: {e}" ))), } - } else if let Ok(v) = SimpleJsonValue::extract_bound(ob) { + } else if let Ok(v) = SimpleJsonValue::extract(ob) { Ok(JsonValue::Value(v)) } else { Err(PyTypeError::new_err(format!( @@ -385,9 +389,11 @@ impl<'source> IntoPyObject<'source> for Condition { } } -impl<'source> FromPyObject<'source> for Condition { - fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> { - Ok(depythonize(ob)?) +impl<'source> FromPyObject<'_, 'source> for Condition { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result<Self, Self::Error> { + Ok(depythonize(&ob)?) } } diff --git a/schema/synapse-config.schema.yaml b/schema/synapse-config.schema.yaml index be1d5d7500..215447cf29 100644 --- a/schema/synapse-config.schema.yaml +++ b/schema/synapse-config.schema.yaml @@ -1,5 +1,5 @@ $schema: https://famedly.github.io/synapse/latest/schema/v1/meta.schema.json -$id: https://famedly.github.io/synapse/schema/synapse/v1.146/synapse-config.schema.json +$id: https://famedly.github.io/synapse/schema/synapse/v1.147/synapse-config.schema.json type: object properties: famedly_maximum_refresh_token_lifetime: @@ -2373,7 +2373,7 @@ properties: Enable the local on-disk media storage provider.
When disabled, media is stored only in configured `media_storage_providers` and temporary files are used for processing. - + **Warning:** If this option is set to `false` and no `media_storage_providers` are configured, all media requests will return 404 errors as there will be no storage backend available. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 22598426fa..593006a2db 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -366,7 +366,11 @@ class MSC3866Config: class ExperimentalConfig(Config): - """Config section for enabling experimental features""" + """Config section for enabling experimental features + + All new experimental features should have a tracking issue with the + `T-ExperimentalFeature` label, kept open as long as the experimental + feature is present in Synapse.""" section = "experimental" diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 883f682e77..0d4d5e0e17 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -22,6 +22,7 @@ import abc import logging from contextlib import ExitStack +from http import HTTPStatus from typing import TYPE_CHECKING, Callable, Iterable import attr @@ -60,6 +61,15 @@ logger = logging.getLogger(__name__) +# List of Unpadded Base64 server signing keys that are known to be vulnerable to attack. +# Incoming requests from homeservers using any of these keys should be refused. +# Events containing signatures using any of these keys should be refused. +BANNED_SERVER_SIGNING_KEYS = ( + # ELEMENTSEC-2025-1670 + "l/O9hxMVKB6Lg+3Hqf0FQQZhVESQcMzbPN1Cz2nM3og=", +) + + @attr.s(slots=True, frozen=True, cmp=False, auto_attribs=True) class VerifyJsonRequest: """ @@ -349,6 +359,19 @@ async def process_request(self, verify_request: VerifyJsonRequest) -> None: if key_result.valid_until_ts < verify_request.minimum_valid_until_ts: continue + key = encode_verify_key_base64(key_result.verify_key) + if key in BANNED_SERVER_SIGNING_KEYS: + raise SynapseError( + HTTPStatus.UNAUTHORIZED, + "Server signing key %s:%s for server %s has been banned by this server" + % ( + key_result.verify_key.alg, + key_result.verify_key.version, + verify_request.server_name, + ), + Codes.UNAUTHORIZED, + ) + await self.process_json(key_result.verify_key, verify_request) verified = True diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 611f307d31..9050c2f934 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1782,6 +1782,10 @@ async def _resume_partial_state_room_sync(self) -> None: room_id=room_id, ) + # We don't start all the partial state room syncs at once, to avoid + # overloading the process. + await self.clock.sleep(Duration(milliseconds=10)) + def _start_partial_state_room_sync( self, initial_destination: str | None, diff --git a/synapse/rest/health.py b/synapse/rest/health.py index ae7cab7a2d..9c7a846076 100644 --- a/synapse/rest/health.py +++ b/synapse/rest/health.py @@ -22,6 +22,8 @@ from twisted.web.resource import Resource from twisted.web.server import Request +from synapse.api.errors import Codes + class HealthResource(Resource): """A resource that does nothing except return a 200 with a body of `OK`, @@ -34,5 +36,15 @@ class HealthResource(Resource): isLeaf = 1 def render_GET(self, request: Request) -> bytes: + # Prevent path traversal by ensuring the request path is exactly /health.
+ if request.path != b"/health": + request.setResponseCode(404) + body = ( + '{"errcode":"' + + Codes.UNRECOGNIZED + + '","error":"Unrecognized request"}' + ) + return body.encode("utf-8") + request.setHeader(b"Content-Type", b"text/plain") return b"OK" diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py index c66002dae4..9a09c0f9b5 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -450,6 +450,9 @@ def _get_and_clear_connection_positions_txn( # Now that we have seen the client has received and used the connection # position, we can delete all the other connection positions. + # + # Note: the rest of the code here assumes this is the only remaining + # connection position. sql = """ DELETE FROM sliding_sync_connection_positions WHERE connection_key = ? AND connection_position != ? @@ -485,9 +488,10 @@ def _get_and_clear_connection_positions_txn( ), ) - required_state_map: dict[int, dict[str, set[str]]] = {} + # Map from required_state_id -> event type -> set of state keys. + stored_required_state_id_maps: dict[int, dict[str, set[str]]] = {} for row in rows: - state = required_state_map[row[0]] = {} + state = stored_required_state_id_maps[row[0]] = {} for event_type, state_key in db_to_json(row[1]): state.setdefault(event_type, set()).add(state_key) @@ -512,7 +516,44 @@ def _get_and_clear_connection_positions_txn( ) in room_config_rows: room_configs[room_id] = RoomSyncConfig( timeline_limit=timeline_limit, - required_state_map=required_state_map[required_state_id], + required_state_map=stored_required_state_id_maps[required_state_id], + ) + + # Clean up any `required_state_id`s that are no longer used by any + # connection position on this connection. + # + # We store the required state config per-connection per-room. Since this + # can be a lot of data, we deduplicate the required state JSON and store + # it separately, with multiple rooms referencing the same `required_state_id`. + # Over time as the required state configs change, some `required_state_id`s + # may no longer be referenced by any room config, so we need + # to clean them up. + # + # We do this by noting that we have pulled out *all* rows from + # `sliding_sync_connection_required_state` for this connection above. We + # have also pulled out all referenced `required_state_id`s for *this* + # connection position, which is the only connection position that + # remains (we deleted the others above). + # + # Thus we can compute the unused `required_state_id`s by looking for any + # `required_state_id`s that are not referenced by the remaining connection + # position. + used_required_state_ids = { + required_state_id for _, _, required_state_id in room_config_rows + } + + unused_required_state_ids = ( + stored_required_state_id_maps.keys() - used_required_state_ids + ) + if unused_required_state_ids: + self.db_pool.simple_delete_many_batch_txn( + txn, + table="sliding_sync_connection_required_state", + keys=("connection_key", "required_state_id"), + values=[ + (connection_key, required_state_id) + for required_state_id in unused_required_state_ids + ], ) # Now look up the per-room stream data. 
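The pruning step added above reduces to a set difference: take every `required_state_id` stored for the connection, subtract the ids still referenced by the sole surviving connection position, and delete the remainder. A standalone sketch of that computation, with plain tuples standing in for the database rows (names are illustrative):

```python
def find_unused_required_state_ids(
    stored_required_state_ids: set[int],
    room_config_rows: list[tuple[str, int, int]],
) -> set[int]:
    """Return the deduplicated required-state ids that no room config on the
    remaining connection position references any more.

    `room_config_rows` mirrors the (room_id, timeline_limit,
    required_state_id) rows pulled out in the transaction above.
    """
    used = {required_state_id for _, _, required_state_id in room_config_rows}
    return stored_required_state_ids - used


# Entry 1 was written by an older, now-deleted connection position;
# only ids 2 and 3 are still referenced, so 1 is safe to delete.
assert find_unused_required_state_ids(
    {1, 2, 3}, [("!a:hs", 10, 2), ("!b:hs", 10, 3)]
) == {1}
```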
diff --git a/synapse/util/check_dependencies.py b/synapse/util/check_dependencies.py index 7e92b55592..cf7573c99d 100644 --- a/synapse/util/check_dependencies.py +++ b/synapse/util/check_dependencies.py @@ -32,6 +32,7 @@ from packaging.markers import Marker, Value, Variable, default_environment from packaging.requirements import Requirement +from packaging.utils import canonicalize_name DISTRIBUTION_NAME = "matrix-synapse" @@ -96,7 +97,7 @@ def _should_ignore_runtime_requirement(req: Requirement) -> bool: # In any case, workaround this by ignoring setuptools_rust here. (It might be # slightly cleaner to put `setuptools_rust` in a `build` extra or similar, but for # now let's do something quick and dirty. - if req.name == "setuptools_rust": + if canonicalize_name(req.name) == "setuptools-rust": return True return False diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 4355704f8a..a3872d6f93 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -15,10 +15,12 @@ import logging +from functools import wraps from typing import ( Any, Callable, ) +from weakref import WeakSet from typing_extensions import ParamSpec from zope.interface import implementer @@ -86,7 +88,7 @@ def __init__(self, reactor: ISynapseThreadlessReactor, server_name: str) -> None self._delayed_call_id: int = 0 """Unique ID used to track delayed calls""" - self._looping_calls: list[LoopingCall] = [] + self._looping_calls: WeakSet[LoopingCall] = WeakSet() """List of active looping calls""" self._call_id_to_delayed_call: dict[int, IDelayedCall] = {} @@ -193,6 +195,7 @@ def _looping_call_common( if now: looping_call_context_string = "looping_call_now" + @wraps(f) def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred: clock_debug_logger.debug( "%s(%s): Executing callback", looping_call_context_string, instance_id @@ -240,7 +243,7 @@ def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred: with context.PreserveLoggingContext(): d = call.start(duration.as_secs(), now=now) d.addErrback(log_failure, "Looping call died", consumeErrors=False) - self._looping_calls.append(call) + self._looping_calls.add(call) clock_debug_logger.debug( "%s(%s): Scheduled looping call every %sms later", @@ -302,6 +305,7 @@ def call_later( if self._is_shutdown: raise Exception("Cannot start delayed call. 
Clock has been shutdown") + @wraps(callback) def wrapped_callback(*args: Any, **kwargs: Any) -> None: clock_debug_logger.debug("call_later(%s): Executing callback", call_id) diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 3cc905f699..6bc935f272 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -20,7 +20,7 @@ # import time from typing import Any, cast -from unittest.mock import Mock +from unittest.mock import Mock, patch import attr import canonicaljson @@ -238,6 +238,51 @@ def test_verify_json_for_server(self) -> None: # self.assertFalse(d.called) self.get_success(d) + def test_verify_json_for_server_using_banned_key(self) -> None: + """Ensure that JSON signed using a banned server_signing_key fails verification.""" + kr = keyring.Keyring(self.hs) + + banned_signing_key = signedjson.key.generate_signing_key("1") + r = self.hs.get_datastores().main.store_server_keys_response( + "server9", + from_server="test", + ts_added_ms=int(time.time() * 1000), + verify_keys={ + get_key_id(banned_signing_key): FetchKeyResult( + verify_key=get_verify_key(banned_signing_key), valid_until_ts=1000 + ) + }, + # The entire response gets signed & stored, just include the bits we + # care about. + response_json={ + "verify_keys": { + get_key_id(banned_signing_key): { + "key": encode_verify_key_base64( + get_verify_key(banned_signing_key) + ) + } + } + }, + ) + self.get_success(r) + + json1: JsonDict = {} + signedjson.sign.sign_json(json1, "server9", banned_signing_key) + + # Ensure the signatures check out normally + d = kr.verify_json_for_server("server9", json1, 500) + self.get_success(d) + + # Patch the list of banned signing keys and ensure the signature check fails + with patch.object( + keyring, + "BANNED_SERVER_SIGNING_KEYS", + (encode_verify_key_base64(get_verify_key(banned_signing_key)),), + ): + # should fail on a signed object signed by the banned key + d = kr.verify_json_for_server("server9", json1, 500) + self.get_failure(d, SynapseError) + def test_verify_for_local_server(self) -> None: """Ensure that locally signed JSON can be verified without fetching keys over federation diff --git a/tests/federation/test_federation_base.py b/tests/federation/test_federation_base.py new file mode 100644 index 0000000000..1bc1da1feb --- /dev/null +++ b/tests/federation/test_federation_base.py @@ -0,0 +1,68 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2026 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# <https://www.gnu.org/licenses/agpl-3.0.html>.
+# +# + + +from unittest.mock import patch + +from signedjson.key import encode_verify_key_base64, get_verify_key + +from synapse.crypto import keyring +from synapse.crypto.event_signing import add_hashes_and_signatures +from synapse.events import make_event_from_dict +from synapse.federation.federation_base import InvalidEventSignatureError + +from tests import unittest + + +class FederationBaseTestCase(unittest.HomeserverTestCase): + def test_events_signed_by_banned_key_are_refused(self) -> None: + """Ensure that event JSON signed using a banned server_signing_key fails verification.""" + event_dict = { + "content": {"body": "Here is the message content"}, + "event_id": "$0:domain", + "origin_server_ts": 1000000, + "type": "m.room.message", + "room_id": "!r:domain", + "sender": f"@u:{self.hs.config.server.server_name}", + "signatures": {}, + "unsigned": {"age_ts": 1000000}, + } + + add_hashes_and_signatures( + self.hs.config.server.default_room_version, + event_dict, + self.hs.config.server.server_name, + self.hs.signing_key, + ) + event = make_event_from_dict(event_dict) + fs = self.hs.get_federation_server() + + # Ensure the signatures check out normally + self.get_success( + fs._check_sigs_and_hash(self.hs.config.server.default_room_version, event) + ) + + # Patch the list of banned signing keys and ensure the signature check fails + with patch.object( + keyring, + "BANNED_SERVER_SIGNING_KEYS", + (encode_verify_key_base64(get_verify_key(self.hs.signing_key)),), + ): + self.get_failure( + fs._check_sigs_and_hash( + self.hs.config.server.default_room_version, event + ), + InvalidEventSignatureError, + ) diff --git a/tests/rest/test_health.py b/tests/rest/test_health.py index bdbfce796a..17249b4eae 100644 --- a/tests/rest/test_health.py +++ b/tests/rest/test_health.py @@ -33,3 +33,16 @@ def test_health(self) -> None: self.assertEqual(channel.code, 200) self.assertEqual(channel.result["body"], b"OK") + + def test_health_path_traversal(self) -> None: + """ + Test that the health endpoint does not allow extra path segments, + which could be used to access other resources. + + Regression test for: https://github.com/element-hq/synapse/issues/19395 + """ + channel = self.make_request("GET", "/health/extra/path", shorthand=False) + + self.assertEqual(channel.code, 404) + self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") + self.assertIn("error", channel.json_body) diff --git a/tests/storage/test_sliding_sync_tables.py b/tests/storage/test_sliding_sync_tables.py index cb9be29c5d..f5bbd49663 100644 --- a/tests/storage/test_sliding_sync_tables.py +++ b/tests/storage/test_sliding_sync_tables.py @@ -3120,6 +3120,248 @@ def test_lazy_loading_room_members_last_seen_ts(self) -> None: # The timestamp for user1 should be updated. self.assertGreater(lazy_member_entries[user1_id], prev_timestamp) + def test_pruning_sliding_sync_connection_required_state(self) -> None: + """Test that we prune old entries from + `sliding_sync_connection_required_state`. + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) + self.helper.send_state( + room_id, EventTypes.Name, {"name": "A room"}, tok=user1_tok + ) + + # Do an initial sync, this will pull down the above room and thus cause + # us to store a single required state entry for the room.
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Member, StateValues.LAZY],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Check that we have an entry in `sliding_sync_connection_required_state`.
+        connection_pos1 = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, from_token)
+        ).connection_position
+
+        connection_key = self.get_success(
+            self.store.db_pool.simple_select_one_onecol(
+                table="sliding_sync_connection_positions",
+                keyvalues={"connection_position": connection_pos1},
+                retcol="connection_key",
+            )
+        )
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        # We expect a single entry here for the one room ID.
+        self.assertEqual(len(required_state_entries), 1)
+        first_required_state_id = required_state_entries[0][0]
+
+        # Update the sync body to request more required state, so that we get
+        # another entry in the table.
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Name, ""],
+                        [EventTypes.Member, StateValues.LAZY],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+
+        # We need to send a message to cause the room to come down the next
+        # sync. This shouldn't be necessary, but we don't currently implement
+        # immediately sending down the room when required_state is updated;
+        # see https://github.com/element-hq/synapse/issues/18844
+        self.helper.send(room_id, "msg1", tok=user1_tok)
+
+        _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        # We expect two entries here: one for the old state and one for the
+        # new state. The old entry doesn't get pruned yet, as the previous
+        # from_token could still be used.
+        self.assertEqual(len(required_state_entries), 2)
+
+        # Sync again with the latest token. This time we expect the old
+        # entry to be pruned.
+        self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        self.assertEqual(len(required_state_entries), 1)
+
+        # Double-check that we have pruned the old entry.
+        self.assertNotEqual(required_state_entries[0][0], first_required_state_id)
+
+    def test_pruning_sliding_sync_connection_required_state_forks(self) -> None:
+        """Test that we prune entries in
+        `sliding_sync_connection_required_state` for forked positions.
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+        self.helper.send_state(
+            room_id, EventTypes.Name, {"name": "A room"}, tok=user1_tok
+        )
+
+        # Do an initial sync; this will pull down the above room and thus cause
+        # us to store a single required state entry for the room.
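+        # (This mirrors the setup of the previous test; the fork only appears
+        # later, when we sync twice from the same token.)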
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Member, StateValues.LAZY],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Check that we have an entry in `sliding_sync_connection_required_state`.
+        connection_pos1 = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, from_token)
+        ).connection_position
+
+        connection_key = self.get_success(
+            self.store.db_pool.simple_select_one_onecol(
+                table="sliding_sync_connection_positions",
+                keyvalues={"connection_position": connection_pos1},
+                retcol="connection_key",
+            )
+        )
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        # We expect a single entry here for the one room ID.
+        self.assertEqual(len(required_state_entries), 1)
+        first_required_state_id = required_state_entries[0][0]
+
+        # Update the sync body to request more required state, so that we get
+        # another entry in the table.
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Name, ""],
+                        [EventTypes.Member, StateValues.LAZY],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+
+        # We need to send a message to cause the room to come down the next
+        # sync. This shouldn't be necessary, but we don't currently implement
+        # immediately sending down the room when required_state is updated;
+        # see https://github.com/element-hq/synapse/issues/18844
+        self.helper.send(room_id, "msg1", tok=user1_tok)
+
+        _, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        # We expect two entries here: one for the old state and one for the
+        # new state. The old entry doesn't get pruned yet, as the previous
+        # from_token could still be used.
+        self.assertEqual(len(required_state_entries), 2)
+        second_required_state_id = sorted(required_state_entries)[1][0]
+
+        # We sync again, but with the old token, creating a fork in the
+        # connection positions. We change the sync body again so that the
+        # `required_state` doesn't get deduplicated.
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Topic, ""],
+                        [EventTypes.Member, StateValues.LAZY],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # There should now be three entries, one per required_state
+        # configuration.
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        self.assertEqual(len(required_state_entries), 3)
+
+        # Sync again with the latest token. This should prune all except the
+        # latest entry in `sliding_sync_connection_required_state`.
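+        # (Syncing from the newest position signals that the forked positions
+        # will no longer be used, so their stale rows can finally be removed.)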
+        _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        required_state_entries = self.get_success(
+            self.store.db_pool.simple_select_list(
+                table="sliding_sync_connection_required_state",
+                keyvalues={"connection_key": connection_key},
+                retcols=("required_state_id", "required_state"),
+            )
+        )
+
+        self.assertEqual(len(required_state_entries), 1)
+
+        # Double-check that we have pruned the old entries.
+        self.assertNotEqual(required_state_entries[0][0], first_required_state_id)
+        self.assertNotEqual(required_state_entries[0][0], second_required_state_id)
+
 
 class SlidingSyncTablesBackgroundUpdatesTestCase(SlidingSyncTablesTestCaseBase):
     """
diff --git a/tests/util/test_check_dependencies.py b/tests/util/test_check_dependencies.py
index b7a23dcd9d..eed0519c44 100644
--- a/tests/util/test_check_dependencies.py
+++ b/tests/util/test_check_dependencies.py
@@ -201,13 +201,13 @@ def test_setuptools_rust_ignored(self) -> None:
         """
         with patch(
             "synapse.util.check_dependencies.metadata.requires",
-            return_value=["setuptools_rust >= 1.3"],
+            return_value=["setuptools-rust >= 1.3"],
         ):
             with self.mock_installed_package(None):
-                # should not raise, even if setuptools_rust is not installed
+                # should not raise, even if setuptools-rust is not installed
                 check_requirements()
             with self.mock_installed_package(old):
-                # We also ignore old versions of setuptools_rust
+                # We also ignore old versions of setuptools-rust
                 check_requirements()
 
     def test_python_version_markers_respected(self) -> None:
diff --git a/tests/util/test_clock.py b/tests/util/test_clock.py
new file mode 100644
index 0000000000..6c5a1158f5
--- /dev/null
+++ b/tests/util/test_clock.py
@@ -0,0 +1,77 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2025 Element Creations Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+#
+
+import weakref
+
+from synapse.util.duration import Duration
+
+from tests.unittest import HomeserverTestCase
+
+
+class ClockTestCase(HomeserverTestCase):
+    def test_looping_calls_are_gced(self) -> None:
+        """Test that looping calls are garbage collected after being stopped.
+
+        The `Clock` tracks looping calls so that all of them can be stopped
+        via the clock.
+        """
+        clock = self.hs.get_clock()
+
+        # Create a new looping call, and take a weakref to it.
+        call = clock.looping_call(lambda: None, Duration(seconds=1))
+
+        weak_call = weakref.ref(call)
+
+        # Stop the looping call. It should get garbage collected after this.
+        call.stop()
+
+        # Delete our strong reference to the call (otherwise it won't get garbage collected).
+        del call
+
+        # Check that the call has been garbage collected.
+        self.assertIsNone(weak_call())
+
+    def test_looping_calls_stopped_on_clock_shutdown(self) -> None:
+        """Test that looping calls are stopped when the clock is shut down."""
+        clock = self.hs.get_clock()
+
+        was_called = False
+
+        def on_call() -> None:
+            nonlocal was_called
+            was_called = True
+
+        # Create a new looping call.
+        call = clock.looping_call(on_call, Duration(seconds=1))
+        weak_call = weakref.ref(call)
+        del call  # Remove our strong reference to the call.
+
+        # The call should still exist.
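+        # (While running, a strong reference elsewhere, such as the reactor's
+        # pending call, keeps it alive even though we dropped ours.)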
+        self.assertIsNotNone(weak_call())
+
+        # Advance the clock to trigger the call.
+        self.reactor.advance(2)
+        self.assertTrue(was_called)
+
+        # Shut down the clock, which should stop the looping call.
+        clock.shutdown()
+
+        # The call should have been garbage collected.
+        self.assertIsNone(weak_call())
+
+        # Advance the clock again; the callback should not fire again.
+        was_called = False
+        self.reactor.advance(2)
+        self.assertFalse(was_called)
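
The two clock tests above hinge on a single lifecycle rule: a stopped looping call must become garbage collectable, while a running one must stay alive until it is stopped, either directly or via clock shutdown. As a rough illustration of how a clock can satisfy both at once, here is a minimal sketch built on Twisted's LoopingCall; `TrackedClock` and its method names are hypothetical stand-ins for this example, not Synapse's actual `Clock` API:

import weakref
from typing import Callable

from twisted.internet.task import LoopingCall


class TrackedClock:
    """Tracks its looping calls weakly: stopped calls can be garbage
    collected, while shutdown() can still stop any that remain alive.
    """

    def __init__(self) -> None:
        # A WeakSet holds no strong references, so once a call is stopped
        # and the caller drops their own reference, nothing keeps it alive.
        self._looping_calls: "weakref.WeakSet[LoopingCall]" = weakref.WeakSet()

    def looping_call(
        self, f: Callable[[], None], interval_seconds: float
    ) -> LoopingCall:
        call = LoopingCall(f)
        self._looping_calls.add(call)
        # While running, the reactor's pending timed call keeps `call`
        # alive, even though this clock only holds a weak reference to it.
        call.start(interval_seconds, now=False)
        return call

    def shutdown(self) -> None:
        # Stop every looping call that is still alive; none fires after this.
        for call in list(self._looping_calls):
            if call.running:
                call.stop()

The WeakSet is the load-bearing choice here: it lets shutdown() reach every live call without the clock itself keeping stopped calls alive, which is exactly the pair of behaviours the two tests assert.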