From 064237a9a41510020a175b64985fe03a06d9ffd0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 22 Jan 2026 12:37:52 +0000 Subject: [PATCH 01/19] Prune `sliding_sync_connection_required_state` table (#19306) When we change the `required_state` config for a room in sliding sync, we insert a new entry into the `sliding_sync_connection_required_state` table. As the sliding sync connection advances we can accrue a lot of stale entries, so let's clear those out. This is a sort of follow on from #19211 --------- Co-authored-by: Eric Eastwood --- changelog.d/19306.misc | 1 + .../storage/databases/main/sliding_sync.py | 47 +++- tests/storage/test_sliding_sync_tables.py | 242 ++++++++++++++++++ 3 files changed, 287 insertions(+), 3 deletions(-) create mode 100644 changelog.d/19306.misc diff --git a/changelog.d/19306.misc b/changelog.d/19306.misc new file mode 100644 index 0000000000..463f87eac3 --- /dev/null +++ b/changelog.d/19306.misc @@ -0,0 +1 @@ +Prune stale entries from `sliding_sync_connection_required_state` table. diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py index c66002dae4..9a09c0f9b5 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -450,6 +450,9 @@ def _get_and_clear_connection_positions_txn( # Now that we have seen the client has received and used the connection # position, we can delete all the other connection positions. + # + # Note: the rest of the code here assumes this is the only remaining + # connection position. sql = """ DELETE FROM sliding_sync_connection_positions WHERE connection_key = ? AND connection_position != ? @@ -485,9 +488,10 @@ def _get_and_clear_connection_positions_txn( ), ) - required_state_map: dict[int, dict[str, set[str]]] = {} + # Map from required_state_id -> event type -> set of state keys. 
+ stored_required_state_id_maps: dict[int, dict[str, set[str]]] = {} for row in rows: - state = required_state_map[row[0]] = {} + state = stored_required_state_id_maps[row[0]] = {} for event_type, state_key in db_to_json(row[1]): state.setdefault(event_type, set()).add(state_key) @@ -512,7 +516,44 @@ def _get_and_clear_connection_positions_txn( ) in room_config_rows: room_configs[room_id] = RoomSyncConfig( timeline_limit=timeline_limit, - required_state_map=required_state_map[required_state_id], + required_state_map=stored_required_state_id_maps[required_state_id], + ) + + # Clean up any `required_state_id`s that are no longer used by any + # connection position on this connection. + # + # We store the required state config per-connection per-room. Since this + # can be a lot of data, we deduplicate the required state JSON and store + # it separately, with multiple rooms referencing the same `required_state_id`. + # Over time as the required state configs change, some `required_state_id`s + # may no longer be referenced by any room config, so we need + # to clean them up. + # + # We do this by noting that we have pulled out *all* rows from + # `sliding_sync_connection_required_state` for this connection above. We + # have also pulled out all referenced `required_state_id`s for *this* + # connection position, which is the only connection position that + # remains (we deleted the others above). + # + # Thus we can compute the unused `required_state_id`s by looking for any + # `required_state_id`s that are not referenced by the remaining connection + # position. 
+ used_required_state_ids = { + required_state_id for _, _, required_state_id in room_config_rows + } + + unused_required_state_ids = ( + stored_required_state_id_maps.keys() - used_required_state_ids + ) + if unused_required_state_ids: + self.db_pool.simple_delete_many_batch_txn( + txn, + table="sliding_sync_connection_required_state", + keys=("connection_key", "required_state_id"), + values=[ + (connection_key, required_state_id) + for required_state_id in unused_required_state_ids + ], ) # Now look up the per-room stream data. diff --git a/tests/storage/test_sliding_sync_tables.py b/tests/storage/test_sliding_sync_tables.py index cb9be29c5d..f5bbd49663 100644 --- a/tests/storage/test_sliding_sync_tables.py +++ b/tests/storage/test_sliding_sync_tables.py @@ -3120,6 +3120,248 @@ def test_lazy_loading_room_members_last_seen_ts(self) -> None: # The timestamp for user1 should be updated. self.assertGreater(lazy_member_entries[user1_id], prev_timestamp) + def test_pruning_sliding_sync_connection_required_state(self) -> None: + """Test that we prune old entries from + `sliding_sync_connection_required_state`. + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) + self.helper.send_state( + room_id, EventTypes.Name, {"name": "A room"}, tok=user1_tok + ) + + # Do an initial sync, this will pull down the above room and thus cause + # us to store a single required state entry for the room. 
+ sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 1, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Check that we have an entry in sliding_sync_connection_required_state + connection_pos1 = self.get_success( + SlidingSyncStreamToken.from_string(self.store, from_token) + ).connection_position + + connection_key = self.get_success( + self.store.db_pool.simple_select_one_onecol( + table="sliding_sync_connection_positions", + keyvalues={"connection_position": connection_pos1}, + retcol="connection_key", + ) + ) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + # We expect a single entry here for the one room ID. + self.assertEqual(len(required_state_entries), 1) + first_required_state_id = required_state_entries[0][0] + + # Update the sync body to request more required state, so that we get + # another entry in the table. + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Name, ""], + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 1, + } + } + } + + # We need to send a message to cause the room to come down the next + # sync. 
This shouldn't be necessary, but we don't currently implement + # immediately sending down the room when required_state is updated, + # see https://github.com/element-hq/synapse/issues/18844 + self.helper.send(room_id, "msg1", tok=user1_tok) + + _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + # We expect two entries here, one for old state and one for new state. + # The old entry doesn't get pruned yet as the previous from_token could + # still be used. + self.assertEqual(len(required_state_entries), 2) + + # Sync again with the latest token. This time we expect the old + # entry to be pruned. + self.do_sync(sync_body, since=from_token, tok=user1_tok) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + self.assertEqual(len(required_state_entries), 1) + + # Double check that we have pruned the old entry. + self.assertNotEqual(required_state_entries[0][0], first_required_state_id) + + def test_pruning_sliding_sync_connection_required_state_forks(self) -> None: + """Test that we prune entries in + `sliding_sync_connection_required_state` for forked positions. + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) + self.helper.send_state( + room_id, EventTypes.Name, {"name": "A room"}, tok=user1_tok + ) + + # Do an initial sync, this will pull down the above room and thus cause + # us to store a single required state entry for the room. 
+ sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 1, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Check that we have an entry in sliding_sync_connection_required_state + connection_pos1 = self.get_success( + SlidingSyncStreamToken.from_string(self.store, from_token) + ).connection_position + + connection_key = self.get_success( + self.store.db_pool.simple_select_one_onecol( + table="sliding_sync_connection_positions", + keyvalues={"connection_position": connection_pos1}, + retcol="connection_key", + ) + ) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + # We expect a single entry here for the one room ID. + self.assertEqual(len(required_state_entries), 1) + first_required_state_id = required_state_entries[0][0] + + # Update the sync body to request more required state, so that we get + # another entry in the table. + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Name, ""], + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 1, + } + } + } + + # We need to send a message to cause the room to come down the next + # sync. 
This shouldn't be necessary, but we don't currently implement + # immediately sending down the room when required_state is updated, + # see https://github.com/element-hq/synapse/issues/18844 + self.helper.send(room_id, "msg1", tok=user1_tok) + + _, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + # We expect two entries here, one for old state and one for new state. + # The old entry doesn't get pruned yet as the previous from_token could + # still be used. + self.assertEqual(len(required_state_entries), 2) + second_required_state_id = sorted(required_state_entries)[1][0] + + # We sync again, but with the old token, creating a fork in the + # connection positions. We change the sync body again so that the + # `required_state` doesn't get deduplicated. + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Topic, ""], + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 1, + } + } + } + _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # There should now be three entries, one for each of the required_state. + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + self.assertEqual(len(required_state_entries), 3) + + # Sync again with the latest token. This should prune all except the + # latest entry in `sliding_sync_connection_required_state`. 
+ _, from_token = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + required_state_entries = self.get_success( + self.store.db_pool.simple_select_list( + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + ) + + self.assertEqual(len(required_state_entries), 1) + + # Double check that we have pruned the old entry. + self.assertNotEqual(required_state_entries[0][0], first_required_state_id) + self.assertNotEqual(required_state_entries[0][0], second_required_state_id) + class SlidingSyncTablesBackgroundUpdatesTestCase(SlidingSyncTablesTestCaseBase): """ From 9a743a4a70547c15c03981f2bccb0ba85e70c43e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 22 Jan 2026 12:56:36 +0000 Subject: [PATCH 02/19] Don't retry joining partial state rooms all at once (#19402) On restart we retry joining partially stated rooms, but if you have a bunch in the database this can cause big performance issues if we start them all at once. So we stagger them. --- changelog.d/19402.misc | 1 + synapse/handlers/federation.py | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 changelog.d/19402.misc diff --git a/changelog.d/19402.misc b/changelog.d/19402.misc new file mode 100644 index 0000000000..0e1ee104a7 --- /dev/null +++ b/changelog.d/19402.misc @@ -0,0 +1 @@ +Don't retry joining partial state rooms all at once on startup. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 7808f8928b..14805ac80f 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1788,6 +1788,10 @@ async def _resume_partial_state_room_sync(self) -> None: room_id=room_id, ) + # We don't start all the partial state room syncs at once, to avoid + # overloading the process. 
+ await self.clock.sleep(Duration(milliseconds=10)) + def _start_partial_state_room_sync( self, initial_destination: str | None, From d6b45a7c8c53590635717a69a22660c6f460ceab Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 22 Jan 2026 11:18:49 -0600 Subject: [PATCH 03/19] Update and align Grafana dashboard to use regex matching for `job=~"$job"` (#19400) We're already using `job=~"$job"` in the majority of the other panels. This is just aligning the stragglers. ### Background For a variable in Grafana, when the "All" value is selected, it translates the variable into a wildcard regex. By default, this is just a giant list of all of the possible values or'd together. It's possible to define a "custom all value" like we've done for `index` as `.*` and feels like we should also do this in a follow-up PR. Input: ``` job="$job" ``` Before (using **exact** match) -> resulted in matching nothing: ``` job="(appservice|background_worker|client_reader|device_lists|event_creator|event_persister|federation_inbound|federation_reader|federation_sender|media_repository|pusher|stream_writers|synapse|synchrotron|user_dir)"" ``` After (using **regex** match) -> matches all jobs as expected: ``` job=~"(appservice|background_worker|client_reader|device_lists|event_creator|event_persister|federation_inbound|federation_reader|federation_sender|media_repository|pusher|stream_writers|synapse|synchrotron|user_dir)"" ``` --- changelog.d/19400.misc | 1 + contrib/grafana/synapse.json | 42 ++++++++++++++++++------------------ 2 files changed, 22 insertions(+), 21 deletions(-) create mode 100644 changelog.d/19400.misc diff --git a/changelog.d/19400.misc b/changelog.d/19400.misc new file mode 100644 index 0000000000..33b0cb509c --- /dev/null +++ b/changelog.d/19400.misc @@ -0,0 +1 @@ +Update and align Grafana dashboard to use regex matching for `job` selectors (`job=~"$job"`) so the "all" value works correctly across all panels. 
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index 5a4315bc96..af6c9d7b9e 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -2268,7 +2268,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "sum(avg_over_time(synapse_http_server_in_flight_requests_count{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(avg_over_time(synapse_http_server_in_flight_requests_count{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "interval": "", "legendFormat": "Total", "refId": "B" @@ -4167,7 +4167,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4210,7 +4210,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4253,7 +4253,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": 
"sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4268,7 +4268,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4310,7 +4310,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4325,7 +4325,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",server_name=\"$server_name\"}[$bucket_size]))", "format": 
"time_series", "interval": "", "intervalFactor": 2, @@ -4367,7 +4367,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4382,7 +4382,7 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": true, - "expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", + "expr": "sum(rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",server_name=\"$server_name\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -4712,7 +4712,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.99, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.99, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "99%", @@ -4722,7 +4722,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.9, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.9, 
sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "90%", @@ -4732,7 +4732,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.75, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.75, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "75%", @@ -4742,7 +4742,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "histogram_quantile(0.5, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=\"$job\"}[$bucket_size])) by (le))", + "expr": "histogram_quantile(0.5, sum(rate(synapse_storage_schedule_time_bucket{index=~\"$index\",server_name=\"$server_name\",job=~\"$job\"}[$bucket_size])) by (le))", "format": "time_series", "intervalFactor": 1, "legendFormat": "50%", @@ -6943,7 +6943,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_notifier_users_woken_by_stream_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_notifier_users_woken_by_stream_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "format": "time_series", "hide": false, "intervalFactor": 2, @@ -7122,7 +7122,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_notified_presence_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_notified_presence_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Notified", "refId": "A" @@ -7131,7 +7131,7 
@@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_federation_presence_out_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_federation_presence_out_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Remote ping", "refId": "B" @@ -7140,7 +7140,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_presence_updates_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_presence_updates_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Total updates", "refId": "C" @@ -7149,7 +7149,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_federation_presence_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_federation_presence_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Remote updates", "refId": "D" @@ -7158,7 +7158,7 @@ "datasource": { "uid": "${DS_PROMETHEUS}" }, - "expr": "rate(synapse_handler_presence_bump_active_time_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_bump_active_time_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "Bump active time", "refId": "E" @@ -7193,7 +7193,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_handler_presence_state_transition_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": 
"rate(synapse_handler_presence_state_transition_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "{{from}} -> {{to}}", "range": true, @@ -7229,7 +7229,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(synapse_handler_presence_notify_reason_total{job=\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", + "expr": "rate(synapse_handler_presence_notify_reason_total{job=~\"$job\",index=~\"$index\",server_name=\"$server_name\"}[$bucket_size])", "interval": "", "legendFormat": "{{reason}}", "range": true, From 826a7dd29aea7041fb25b98d360da4dbedf22712 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 22 Jan 2026 14:07:22 -0600 Subject: [PATCH 04/19] Update "Event Send Time Quantiles" graph to only use dots for the event persistence rate (#19399) This is the same thing we already do in the [`matrix.org` dashboard](https://grafana.matrix.org/d/000000012/synapse) and although the purple dots aren't new (introduced in https://github.com/matrix-org/synapse/pull/10001), you can see that was the intention in https://github.com/element-hq/synapse/pull/18510. I think this was just how our contrib dashboard looked at the time and perhaps was a Grafana version mismatch thing which is why it didn't translate. --- changelog.d/19399.misc | 1 + contrib/grafana/synapse.json | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog.d/19399.misc diff --git a/changelog.d/19399.misc b/changelog.d/19399.misc new file mode 100644 index 0000000000..0d02904f40 --- /dev/null +++ b/changelog.d/19399.misc @@ -0,0 +1 @@ +Update "Event Send Time Quantiles" graph to only use dots for the event persistence rate (Grafana dashboard). 
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index af6c9d7b9e..ceacc10369 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -416,6 +416,10 @@ { "id": "custom.axisPlacement", "value": "right" + }, + { + "id": "custom.fillOpacity", + "value": 0 } ] }, @@ -443,6 +447,10 @@ { "id": "custom.axisPlacement", "value": "right" + }, + { + "id": "custom.fillOpacity", + "value": 0 } ] } From 24df0edb5fcc53eceb44b77649f3da6cebe41e18 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 26 Jan 2026 07:27:26 -0500 Subject: [PATCH 05/19] Limit health endpoint to `/health$` (#19405) --- changelog.d/19405.misc | 1 + synapse/rest/health.py | 12 ++++++++++++ tests/rest/test_health.py | 13 +++++++++++++ 3 files changed, 26 insertions(+) create mode 100644 changelog.d/19405.misc diff --git a/changelog.d/19405.misc b/changelog.d/19405.misc new file mode 100644 index 0000000000..f3be5b2027 --- /dev/null +++ b/changelog.d/19405.misc @@ -0,0 +1 @@ +Disallow requests to the health endpoint from containing trailing path characters. \ No newline at end of file diff --git a/synapse/rest/health.py b/synapse/rest/health.py index ae7cab7a2d..9c7a846076 100644 --- a/synapse/rest/health.py +++ b/synapse/rest/health.py @@ -22,6 +22,8 @@ from twisted.web.resource import Resource from twisted.web.server import Request +from synapse.api.errors import Codes + class HealthResource(Resource): """A resource that does nothing except return a 200 with a body of `OK`, @@ -34,5 +36,15 @@ class HealthResource(Resource): isLeaf = 1 def render_GET(self, request: Request) -> bytes: + # Prevent path traversal by ensuring the request path is exactly /health. 
+ if request.path != b"/health": + request.setResponseCode(404) + body = ( + '{"errcode":"' + + Codes.UNRECOGNIZED + + '","error":"Unrecognized request"}' + ) + return body.encode("utf-8") + request.setHeader(b"Content-Type", b"text/plain") return b"OK" diff --git a/tests/rest/test_health.py b/tests/rest/test_health.py index bdbfce796a..17249b4eae 100644 --- a/tests/rest/test_health.py +++ b/tests/rest/test_health.py @@ -33,3 +33,16 @@ def test_health(self) -> None: self.assertEqual(channel.code, 200) self.assertEqual(channel.result["body"], b"OK") + + def test_health_path_traversal(self) -> None: + """ + Test that the health endpoint does not allow extra path segments, + which could be used to access other resources. + + Regression test for: https://github.com/element-hq/synapse/issues/19395 + """ + channel = self.make_request("GET", "/health/extra/path", shorthand=False) + + self.assertEqual(channel.code, 404) + self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") + self.assertIn("error", channel.json_body) From e7dd5d3cfb3481c927c4c4b253a06428e1259784 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jan 2026 16:05:24 +0000 Subject: [PATCH 06/19] Bump actions/checkout from 6.0.1 to 6.0.2 in the minor-and-patches group (#19407) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the minor-and-patches group with 1 update: [actions/checkout](https://github.com/actions/checkout). Updates `actions/checkout` from 6.0.1 to 6.0.2
Release notes

Sourced from actions/checkout's releases.

v6.0.2

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v6.0.1...v6.0.2

Changelog

Sourced from actions/checkout's changelog.

Changelog

v6.0.2

v6.0.1

v6.0.0

v5.0.1

v5.0.0

v4.3.1

v4.3.0

v4.2.2

v4.2.1

v4.2.0

v4.1.7

v4.1.6

... (truncated)

Commits
  • de0fac2 Fix tag handling: preserve annotations and explicit fetch-tags (#2356)
  • 064fe7f Add orchestration_id to git user-agent when ACTIONS_ORCHESTRATION_ID is set (...
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/checkout&package-manager=github_actions&previous-version=6.0.1&new-version=6.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- .github/workflows/docs-pr.yaml | 4 +- .github/workflows/docs.yaml | 2 +- .github/workflows/fix_lint.yaml | 2 +- .github/workflows/latest_deps.yml | 10 ++--- .github/workflows/poetry_lockfile.yaml | 2 +- .github/workflows/push_complement_image.yml | 6 +-- .github/workflows/release-artifacts.yml | 8 ++-- .github/workflows/schema.yaml | 4 +- .github/workflows/tests.yml | 44 ++++++++++----------- .github/workflows/triage_labelled.yml | 2 +- .github/workflows/twisted_trunk.yml | 10 ++--- 12 files changed, 48 insertions(+), 48 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 706eacbe90..bcd65b2f60 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -31,7 +31,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Extract version from pyproject.toml # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 4480fd80bc..424857822b 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -13,7 +13,7 @@ jobs: name: GitHub Pages runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Fetch all history so that the schema_versions script works. 
fetch-depth: 0 @@ -50,7 +50,7 @@ jobs: name: Check links in documentation runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Setup mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index d64adf924c..11fd8c6d6c 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -50,7 +50,7 @@ jobs: needs: - pre steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Fetch all history so that the schema_versions script works. fetch-depth: 0 diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index a0a37f2a67..babc3bc5de 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -18,7 +18,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 9908633f8e..a85551854c 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -42,7 +42,7 @@ jobs: if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: @@ -77,7 +77,7 @@ jobs: postgres-version: "14" steps: - - uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -152,7 +152,7 @@ jobs: BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -202,7 +202,7 @@ jobs: steps: - name: Check out synapse codebase - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse @@ -234,7 +234,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml index 29b5950ab8..fb4c449b58 100644 --- a/.github/workflows/poetry_lockfile.yaml +++ b/.github/workflows/poetry_lockfile.yaml @@ -16,7 +16,7 @@ jobs: name: "Check locked dependencies have sdists" runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: '3.x' diff --git a/.github/workflows/push_complement_image.yml b/.github/workflows/push_complement_image.yml index b662b98754..e6d0894e83 100644 --- a/.github/workflows/push_complement_image.yml +++ b/.github/workflows/push_complement_image.yml 
@@ -33,17 +33,17 @@ jobs: packages: write steps: - name: Checkout specific branch (debug build) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'workflow_dispatch' with: ref: ${{ inputs.branch }} - name: Checkout clean copy of develop (scheduled build) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'schedule' with: ref: develop - name: Checkout clean copy of master (on-push) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 if: github.event_name == 'push' with: ref: master diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 139d02866f..5f5b64dc64 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -27,7 +27,7 @@ jobs: name: "Calculate list of debian distros" runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -55,7 +55,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: src @@ -123,7 +123,7 @@ jobs: os: "ubuntu-24.04-arm" steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: @@ -161,7 +161,7 @@ jobs: if: ${{ !startsWith(github.ref, 'refs/pull/') }} 
steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.10" diff --git a/.github/workflows/schema.yaml b/.github/workflows/schema.yaml index 356d155807..b068e976db 100644 --- a/.github/workflows/schema.yaml +++ b/.github/workflows/schema.yaml @@ -14,7 +14,7 @@ jobs: name: Ensure Synapse config schema is valid runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -40,7 +40,7 @@ jobs: name: Ensure generated documentation is up-to-date runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f93c25c01f..fc544fcfde 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -86,7 +86,7 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: @@ -106,7 +106,7 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: 
actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -116,7 +116,7 @@ jobs: check-lockfile: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -129,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Setup Poetry uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 @@ -151,7 +151,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -187,7 +187,7 @@ jobs: lint-crlf: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Check line endings run: scripts-dev/check_line_terminators.sh @@ -196,7 +196,7 @@ jobs: if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 @@ -214,7 +214,7 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -233,7 +233,7 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -251,7 +251,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -287,7 +287,7 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -307,7 +307,7 @@ jobs: needs: changes if: ${{ needs.changes.outputs.linting_readme == 'true' }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -354,7 +354,7 @@ jobs: needs: linting-done runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: "3.x" @@ -375,7 +375,7 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }} steps: - - uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 - name: Set up PostgreSQL ${{ matrix.job.postgres-version }} if: ${{ matrix.job.postgres-version }} @@ -431,7 +431,7 @@ jobs: - changes runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -494,7 +494,7 @@ jobs: extras: ["all"] steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 # Install libs necessary for PyPy to build binary wheels for dependencies - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 @@ -544,7 +544,7 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Prepare test blacklist run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers @@ -591,7 +591,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 postgresql-client - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: @@ -634,7 +634,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Add PostgreSQL apt 
repository # We need a version of pg_dump that can handle the version of # PostgreSQL being tested against. The Ubuntu package repository lags @@ -689,7 +689,7 @@ jobs: steps: - name: Checkout synapse codebase - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse @@ -739,7 +739,7 @@ jobs: - changes steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -759,7 +759,7 @@ jobs: - changes steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml index 27ff1d80cd..31dddab012 100644 --- a/.github/workflows/triage_labelled.yml +++ b/.github/workflows/triage_labelled.yml @@ -22,7 +22,7 @@ jobs: # This field is case-sensitive. TARGET_STATUS: Needs info steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # Only clone the script file we care about, instead of the whole repo. 
sparse-checkout: .ci/scripts/triage_labelled_issue.sh diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 2433632a7f..14b48317db 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -43,7 +43,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -70,7 +70,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: sudo apt-get -qq install xmlsec1 - name: Install Rust @@ -117,7 +117,7 @@ jobs: - ${{ github.workspace }}:/src steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @@ -175,7 +175,7 @@ jobs: steps: - name: Run actions/checkout@v4 for synapse - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: path: synapse @@ -217,7 +217,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From ede0f4f56b1b2e2bf66565c0b4d7990bd028c301 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 23:08:00 +0000 Subject: [PATCH 07/19] Bump python-multipart from 0.0.20 to 0.0.22 
(#19411) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-multipart](https://github.com/Kludex/python-multipart) from 0.0.20 to 0.0.22.
Release notes

Sourced from python-multipart's releases.

Version 0.0.22

What's Changed

  • Drop directory path from filename in File 9433f4b.

Full Changelog: https://github.com/Kludex/python-multipart/compare/0.0.21...0.0.22

Version 0.0.21

What's Changed

New Contributors

Full Changelog: https://github.com/Kludex/python-multipart/compare/0.0.20...0.0.21

Changelog

Sourced from python-multipart's changelog.

0.0.22 (2026-01-25)

  • Drop directory path from filename in File 9433f4b.

0.0.21 (2025-12-17)

  • Add support for Python 3.14 and drop EOL 3.8 and 3.9 #216.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-multipart&package-manager=pip&previous-version=0.0.20&new-version=0.0.22)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/element-hq/synapse/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 97daf02cae..2d75fb50c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2322,14 +2322,14 @@ six = ">=1.5" [[package]] name = "python-multipart" -version = "0.0.20" +version = "0.0.22" description = "A streaming multipart parser for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, - {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, + {file = "python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155"}, + {file = "python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58"}, ] [[package]] From d02796fcc48203f421b033d06bacd547b18fe4a8 Mon Sep 17 00:00:00 2001 From: razvp <156998520+razvp@users.noreply.github.com> Date: Wed, 28 Jan 2026 18:12:34 +0200 Subject: [PATCH 08/19] Bump `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0 (#19412) Hello, I'm writing on behalf of the Citadel product developed by ERCOM. This PR bumps `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0. For the code migration I followed the guide found here: [link](https://pyo3.rs/v0.27.0/migration.html). 
--- Cargo.lock | 24 ++++++++++++------------ changelog.d/19412.misc | 1 + rust/Cargo.toml | 4 ++-- rust/src/http.rs | 14 +++++++------- rust/src/http_client.rs | 5 ++++- rust/src/push/mod.rs | 32 +++++++++++++++++++------------- 6 files changed, 45 insertions(+), 35 deletions(-) create mode 100644 changelog.d/19412.misc diff --git a/Cargo.lock b/Cargo.lock index e5ce9325df..0edfef6869 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -813,9 +813,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" +checksum = "ab53c047fcd1a1d2a8820fe84f05d6be69e9526be40cb03b73f86b6b03e6d87d" dependencies = [ "anyhow", "indoc", @@ -831,18 +831,18 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" +checksum = "b455933107de8642b4487ed26d912c2d899dec6114884214a0b3bb3be9261ea6" dependencies = [ "target-lexicon", ] [[package]] name = "pyo3-ffi" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" +checksum = "1c85c9cbfaddf651b1221594209aed57e9e5cff63c4d11d1feead529b872a089" dependencies = [ "libc", "pyo3-build-config", @@ -861,9 +861,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.26.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" +checksum = "0a5b10c9bf9888125d917fb4d2ca2d25c8df94c7ab5a52e13313a07e050a3b02" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -873,9 +873,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.26.0" 
+version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" +checksum = "03b51720d314836e53327f5871d4c0cfb4fb37cc2c4a11cc71907a86342c40f9" dependencies = [ "heck", "proc-macro2", @@ -886,9 +886,9 @@ dependencies = [ [[package]] name = "pythonize" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2" +checksum = "a3a8f29db331e28c332c63496cfcbb822aca3d7320bc08b655d7fd0c29c50ede" dependencies = [ "pyo3", "serde", diff --git a/changelog.d/19412.misc b/changelog.d/19412.misc new file mode 100644 index 0000000000..6b811be799 --- /dev/null +++ b/changelog.d/19412.misc @@ -0,0 +1 @@ +Bump `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0. Contributed by @razvp @ ERCOM. \ No newline at end of file diff --git a/rust/Cargo.toml b/rust/Cargo.toml index e8321d159b..350701d327 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -30,14 +30,14 @@ http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" mime = "0.3.17" -pyo3 = { version = "0.26.0", features = [ +pyo3 = { version = "0.27.2", features = [ "macros", "anyhow", "abi3", "abi3-py310", ] } pyo3-log = "0.13.1" -pythonize = "0.26.0" +pythonize = "0.27.0" regex = "1.6.0" sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } diff --git a/rust/src/http.rs b/rust/src/http.rs index 63ed05be54..8d462f2e5e 100644 --- a/rust/src/http.rs +++ b/rust/src/http.rs @@ -32,7 +32,7 @@ fn read_io_body(body: &Bound<'_, PyAny>, chunk_size: usize) -> PyResult { let mut buf = BytesMut::new(); loop { let bound = &body.call_method1("read", (chunk_size,))?; - let bytes: &Bound<'_, PyBytes> = bound.downcast()?; + let bytes: &Bound<'_, PyBytes> = bound.cast()?; if bytes.as_bytes().is_empty() { return Ok(buf.into()); } @@ -58,12 +58,12 @@ pub fn http_request_from_twisted(request: &Bound<'_, 
PyAny>) -> PyResult = bound.downcast()?; + let uri: &Bound<'_, PyBytes> = bound.cast()?; *req.uri_mut() = Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?; let bound = &request.getattr("method")?; - let method: &Bound<'_, PyBytes> = bound.downcast()?; + let method: &Bound<'_, PyBytes> = bound.cast()?; *req.method_mut() = Method::from_bytes(method.as_bytes()) .map_err(|_| PyValueError::new_err("invalid method"))?; @@ -74,17 +74,17 @@ pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult = header.downcast()?; + let header: &Bound<'_, PyTuple> = header.cast()?; let bound = &header.get_item(0)?; - let name: &Bound<'_, PyBytes> = bound.downcast()?; + let name: &Bound<'_, PyBytes> = bound.cast()?; let name = HeaderName::from_bytes(name.as_bytes()) .map_err(|_| PyValueError::new_err("invalid header name"))?; let bound = &header.get_item(1)?; - let values: &Bound<'_, PySequence> = bound.downcast()?; + let values: &Bound<'_, PySequence> = bound.cast()?; for index in 0..values.len()? { let bound = &values.get_item(index)?; - let value: &Bound<'_, PyBytes> = bound.downcast()?; + let value: &Bound<'_, PyBytes> = bound.cast()?; let value = HeaderValue::from_bytes(value.as_bytes()) .map_err(|_| PyValueError::new_err("invalid header value"))?; req.headers_mut().append(name.clone(), value); diff --git a/rust/src/http_client.rs b/rust/src/http_client.rs index 4bd80c8e04..b1e4f753b8 100644 --- a/rust/src/http_client.rs +++ b/rust/src/http_client.rs @@ -316,5 +316,8 @@ fn make_deferred_yieldable<'py>( func }); - make_deferred_yieldable.call1(py, (deferred,))?.extract(py) + make_deferred_yieldable + .call1(py, (deferred,))? 
+ .extract(py) + .map_err(Into::into) } diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs index b0cedd758c..ac9b9c93e4 100644 --- a/rust/src/push/mod.rs +++ b/rust/src/push/mod.rs @@ -273,14 +273,16 @@ pub enum SimpleJsonValue { Null, } -impl<'source> FromPyObject<'source> for SimpleJsonValue { - fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { - if let Ok(s) = ob.downcast::() { +impl<'source> FromPyObject<'_, 'source> for SimpleJsonValue { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result { + if let Ok(s) = ob.cast::() { Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string()))) // A bool *is* an int, ensure we try bool first. - } else if let Ok(b) = ob.downcast::() { + } else if let Ok(b) = ob.cast::() { Ok(SimpleJsonValue::Bool(b.extract()?)) - } else if let Ok(i) = ob.downcast::() { + } else if let Ok(i) = ob.cast::() { Ok(SimpleJsonValue::Int(i.extract()?)) } else if ob.is_none() { Ok(SimpleJsonValue::Null) @@ -301,12 +303,14 @@ pub enum JsonValue { Value(SimpleJsonValue), } -impl<'source> FromPyObject<'source> for JsonValue { - fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { - if let Ok(l) = ob.downcast::() { +impl<'source> FromPyObject<'_, 'source> for JsonValue { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result { + if let Ok(l) = ob.cast::() { match l .iter() - .map(|it| SimpleJsonValue::extract_bound(&it)) + .map(|it| SimpleJsonValue::extract(it.as_borrowed())) .collect() { Ok(a) => Ok(JsonValue::Array(a)), @@ -314,7 +318,7 @@ impl<'source> FromPyObject<'source> for JsonValue { "Can't convert to JsonValue::Array: {e}" ))), } - } else if let Ok(v) = SimpleJsonValue::extract_bound(ob) { + } else if let Ok(v) = SimpleJsonValue::extract(ob) { Ok(JsonValue::Value(v)) } else { Err(PyTypeError::new_err(format!( @@ -385,9 +389,11 @@ impl<'source> IntoPyObject<'source> for Condition { } } -impl<'source> FromPyObject<'source> for Condition { - fn extract_bound(ob: &Bound<'source, 
PyAny>) -> PyResult { - Ok(depythonize(ob)?) +impl<'source> FromPyObject<'_, 'source> for Condition { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'source, PyAny>) -> Result { + Ok(depythonize(&ob)?) } } From 0dfcffab0fa29be1766ea42391bdbea4cad333d6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Jan 2026 10:26:53 +0000 Subject: [PATCH 09/19] Fix looping calls not getting GCed. (#19416) The `Clock` tracks looping calls to allow cancelling of all looping calls. However, this stopped them from getting garbage collected. This was introduced in https://github.com/element-hq/synapse/pull/18828 Fixes https://github.com/element-hq/synapse/issues/19392 --- changelog.d/19416.bugfix | 1 + synapse/util/clock.py | 8 +++-- tests/util/test_clock.py | 77 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 84 insertions(+), 2 deletions(-) create mode 100644 changelog.d/19416.bugfix create mode 100644 tests/util/test_clock.py diff --git a/changelog.d/19416.bugfix b/changelog.d/19416.bugfix new file mode 100644 index 0000000000..f0c2872410 --- /dev/null +++ b/changelog.d/19416.bugfix @@ -0,0 +1 @@ +Fix memory leak caused by not cleaning up stopped looping calls. Introduced in v1.140.0. 
diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 4355704f8a..a3872d6f93 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -15,10 +15,12 @@ import logging +from functools import wraps from typing import ( Any, Callable, ) +from weakref import WeakSet from typing_extensions import ParamSpec from zope.interface import implementer @@ -86,7 +88,7 @@ def __init__(self, reactor: ISynapseThreadlessReactor, server_name: str) -> None self._delayed_call_id: int = 0 """Unique ID used to track delayed calls""" - self._looping_calls: list[LoopingCall] = [] + self._looping_calls: WeakSet[LoopingCall] = WeakSet() """List of active looping calls""" self._call_id_to_delayed_call: dict[int, IDelayedCall] = {} @@ -193,6 +195,7 @@ def _looping_call_common( if now: looping_call_context_string = "looping_call_now" + @wraps(f) def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred: clock_debug_logger.debug( "%s(%s): Executing callback", looping_call_context_string, instance_id @@ -240,7 +243,7 @@ def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred: with context.PreserveLoggingContext(): d = call.start(duration.as_secs(), now=now) d.addErrback(log_failure, "Looping call died", consumeErrors=False) - self._looping_calls.append(call) + self._looping_calls.add(call) clock_debug_logger.debug( "%s(%s): Scheduled looping call every %sms later", @@ -302,6 +305,7 @@ def call_later( if self._is_shutdown: raise Exception("Cannot start delayed call. Clock has been shutdown") + @wraps(callback) def wrapped_callback(*args: Any, **kwargs: Any) -> None: clock_debug_logger.debug("call_later(%s): Executing callback", call_id) diff --git a/tests/util/test_clock.py b/tests/util/test_clock.py new file mode 100644 index 0000000000..6c5a1158f5 --- /dev/null +++ b/tests/util/test_clock.py @@ -0,0 +1,77 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. 
+# +# Copyright (C) 2025 Element Creations Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +# + +import weakref + +from synapse.util.duration import Duration + +from tests.unittest import HomeserverTestCase + + +class ClockTestCase(HomeserverTestCase): + def test_looping_calls_are_gced(self) -> None: + """Test that looping calls are garbage collected after being stopped. + + The `Clock` tracks looping calls so to allow stopping of all looping + calls via the clock. + """ + clock = self.hs.get_clock() + + # Create a new looping call, and take a weakref to it. + call = clock.looping_call(lambda: None, Duration(seconds=1)) + + weak_call = weakref.ref(call) + + # Stop the looping call. It should get garbage collected after this. + call.stop() + + # Delete our strong reference to the call (otherwise it won't get garbage collected). + del call + + # Check that the call has been garbage collected. + self.assertIsNone(weak_call()) + + def test_looping_calls_stopped_on_clock_shutdown(self) -> None: + """Test that looping calls are stopped when the clock is shut down.""" + clock = self.hs.get_clock() + + was_called = False + + def on_call() -> None: + nonlocal was_called + was_called = True + + # Create a new looping call. + call = clock.looping_call(on_call, Duration(seconds=1)) + weak_call = weakref.ref(call) + del call # Remove our strong reference to the call. + + # The call should still exist. + self.assertIsNotNone(weak_call()) + + # Advance the clock to trigger the call. + self.reactor.advance(2) + self.assertTrue(was_called) + + # Shut down the clock, which should stop the looping call. + clock.shutdown() + + # The call should have been garbage collected. 
+ self.assertIsNone(weak_call()) + + # Advance the clock again; the call should not be called again. + was_called = False + self.reactor.advance(2) + self.assertFalse(was_called) From e59e5490941c078c1b74fe193f8d7d670ac6f58f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Feb 2026 23:22:15 +0000 Subject: [PATCH 10/19] Bump actions/setup-go from 6.1.0 to 6.2.0 in the minor-and-patches group (#19423) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the minor-and-patches group with 1 update: [actions/setup-go](https://github.com/actions/setup-go). Updates `actions/setup-go` from 6.1.0 to 6.2.0
Release notes

Sourced from actions/setup-go's releases.

v6.2.0

What's Changed

Enhancements

Dependency updates

New Contributors

Full Changelog: https://github.com/actions/setup-go/compare/v6...v6.2.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-go&package-manager=github_actions&previous-version=6.1.0&new-version=6.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/latest_deps.yml | 2 +- .github/workflows/tests.yml | 2 +- .github/workflows/twisted_trunk.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index a85551854c..9e0f2c384e 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -209,7 +209,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fc544fcfde..bb2e167e23 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -702,7 +702,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 14b48317db..bd5c79f16d 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -182,7 +182,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 + - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod From 
065ff194c23ca9c326460ace5b55edc0e5eff3d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Feb 2026 00:19:25 +0000 Subject: [PATCH 11/19] Bump serde_json from 1.0.145 to 1.0.148 in the patches group across 1 directory (#19391) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the patches group with 1 update in the / directory: [serde_json](https://github.com/serde-rs/json). Updates `serde_json` from 1.0.145 to 1.0.148
Release notes

Sourced from serde_json's releases.

v1.0.148

  • Update zmij dependency to 1.0

v1.0.147

  • Switch float-to-string algorithm from Ryū to Żmij for better f32 and f64 serialization performance (#1304)

v1.0.146

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=serde_json&package-manager=cargo&previous-version=1.0.145&new-version=1.0.148)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Devon Hudson --- .github/workflows/tests.yml | 2 +- Cargo.lock | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index bb2e167e23..715dfa93d9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -238,7 +238,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master with: - toolchain: nightly-2025-04-23 + toolchain: nightly-2026-02-01 components: clippy - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 diff --git a/Cargo.lock b/Cargo.lock index 0edfef6869..8d1cd967d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1207,15 +1207,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -1921,3 +1921,9 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zmij" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff05f8caa9038894637571ae6b9e29466c1f4f829d26c9b28f869a29cbe3445" From 84d591934ba27054fb42e807928df0901ac1eea7 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Tue, 3 Feb 2026 15:33:39 +0000 Subject: [PATCH 12/19] Add notes that new experimental features should have associated tracking issues. 
(#19410) Signed-off-by: Olivier 'reivilibre --- changelog.d/19410.misc | 1 + docs/development/experimental_features.md | 27 +++++++++++++++++++++++ synapse/config/experimental.py | 6 ++++- 3 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19410.misc diff --git a/changelog.d/19410.misc b/changelog.d/19410.misc new file mode 100644 index 0000000000..97a4070304 --- /dev/null +++ b/changelog.d/19410.misc @@ -0,0 +1 @@ +Add notes that new experimental features should have associated tracking issues. \ No newline at end of file diff --git a/docs/development/experimental_features.md b/docs/development/experimental_features.md index d6b11496cc..5a86017ecf 100644 --- a/docs/development/experimental_features.md +++ b/docs/development/experimental_features.md @@ -35,3 +35,30 @@ but one should be used if unsure. New experimental configuration flags should be added under the `experimental` configuration key (see the `synapse.config.experimental` file) and either explain (briefly) what is being enabled, or include the MSC number. +The configuration flag should link to the tracking issue for the experimental feature (see below). + + +## Tracking issues for experimental features + +In the interest of having some documentation around experimental features, without +polluting the stable documentation, all new experimental features should have a tracking issue with +[the `T-ExperimentalFeature` label](https://github.com/element-hq/synapse/issues?q=sort%3Aupdated-desc+state%3Aopen+label%3A%22T-ExperimentalFeature%22), +kept open as long as the experimental feature is present in Synapse. + +The configuration option for the feature should have a comment linking to the tracking issue, +for ease of discoverability. + +As a guideline, the issue should contain: + +- Context for why this experimental feature is in Synapse + - This could well be a link to somewhere else, where this context is already available. 
+- If applicable, why the feature is enabled by default. (Why do we need to enable it by default and why is it safe?) +- If applicable, setup instructions for any non-standard components or configuration needed by the feature. + (Ideally this will be moved to the configuration manual after stabilisation.) +- Design decisions behind the Synapse implementation. + (Ideally this will be moved to the developers' documentation after stabilisation.) +- Any caveats around the current implementation of the feature, such as: + - missing aspects + - breakage or incompatibility that is expected if/when the feature is stabilised, + or when the feature is turned on/off +- Criteria for how we know whether we can remove the feature in the future. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 0150b71621..1bd70ead09 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -366,7 +366,11 @@ class MSC3866Config: class ExperimentalConfig(Config): - """Config section for enabling experimental features""" + """Config section for enabling experimental features + + All new experimental features should have a tracking issue with the + `T-ExperimentalFeatures` label, kept open as long as the experimental + feature is present in Synapse.""" section = "experimental" From 98a540a41d6fd5920fad2bab900425e869fb9cbd Mon Sep 17 00:00:00 2001 From: Renaud Allard Date: Tue, 3 Feb 2026 16:40:20 +0100 Subject: [PATCH 13/19] Fix a typo in check_dependencies.py which makes setuptools_rust a running dependency (#19417) ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) There is a typo in check_dependencies.py which makes setuptools_rust a runtime requirement, but there is no need for it at runtime. 
This patch solves the typo. I tested starting 1.146.0 with this patch and without setuptools_rust and it starts correctly --- changelog.d/19417.bugfix | 1 + synapse/util/check_dependencies.py | 3 ++- tests/util/test_check_dependencies.py | 6 +++--- 3 files changed, 6 insertions(+), 4 deletions(-) create mode 100644 changelog.d/19417.bugfix diff --git a/changelog.d/19417.bugfix b/changelog.d/19417.bugfix new file mode 100644 index 0000000000..9f5c9c02d9 --- /dev/null +++ b/changelog.d/19417.bugfix @@ -0,0 +1 @@ +Fix a typo that incorrectly made `setuptools_rust` a runtime dependency. diff --git a/synapse/util/check_dependencies.py b/synapse/util/check_dependencies.py index 7e92b55592..cf7573c99d 100644 --- a/synapse/util/check_dependencies.py +++ b/synapse/util/check_dependencies.py @@ -32,6 +32,7 @@ from packaging.markers import Marker, Value, Variable, default_environment from packaging.requirements import Requirement +from packaging.utils import canonicalize_name DISTRIBUTION_NAME = "matrix-synapse" @@ -96,7 +97,7 @@ def _should_ignore_runtime_requirement(req: Requirement) -> bool: # In any case, workaround this by ignoring setuptools_rust here. (It might be # slightly cleaner to put `setuptools_rust` in a `build` extra or similar, but for # now let's do something quick and dirty. 
- if req.name == "setuptools_rust": + if canonicalize_name(req.name) == "setuptools-rust": return True return False diff --git a/tests/util/test_check_dependencies.py b/tests/util/test_check_dependencies.py index b7a23dcd9d..eed0519c44 100644 --- a/tests/util/test_check_dependencies.py +++ b/tests/util/test_check_dependencies.py @@ -201,13 +201,13 @@ def test_setuptools_rust_ignored(self) -> None: """ with patch( "synapse.util.check_dependencies.metadata.requires", - return_value=["setuptools_rust >= 1.3"], + return_value=["setuptools-rust >= 1.3"], ): with self.mock_installed_package(None): - # should not raise, even if setuptools_rust is not installed + # should not raise, even if setuptools-rust is not installed check_requirements() with self.mock_installed_package(old): - # We also ignore old versions of setuptools_rust + # We also ignore old versions of setuptools-rust check_requirements() def test_python_version_markers_respected(self) -> None: From 3048ff8b262995254e3ff2c07de3474d5bfa6daf Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Tue, 3 Feb 2026 08:56:37 -0700 Subject: [PATCH 14/19] 1.147.0rc1 --- CHANGES.md | 20 ++++++++++++++++++++ changelog.d/19306.misc | 1 - changelog.d/19399.misc | 1 - changelog.d/19400.misc | 1 - changelog.d/19402.misc | 1 - changelog.d/19405.misc | 1 - changelog.d/19410.misc | 1 - changelog.d/19412.misc | 1 - changelog.d/19416.bugfix | 1 - changelog.d/19417.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- schema/synapse-config.schema.yaml | 2 +- 13 files changed, 28 insertions(+), 11 deletions(-) delete mode 100644 changelog.d/19306.misc delete mode 100644 changelog.d/19399.misc delete mode 100644 changelog.d/19400.misc delete mode 100644 changelog.d/19402.misc delete mode 100644 changelog.d/19405.misc delete mode 100644 changelog.d/19410.misc delete mode 100644 changelog.d/19412.misc delete mode 100644 changelog.d/19416.bugfix delete mode 100644 changelog.d/19417.bugfix diff --git a/CHANGES.md b/CHANGES.md index 
516ac4dbfa..9c688402ef 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,23 @@ +# Synapse 1.147.0rc1 (2026-02-03) + +## Bugfixes + +- Fix memory leak caused by not cleaning up stopped looping calls. Introduced in v1.140.0. ([\#19416](https://github.com/element-hq/synapse/issues/19416)) +- Fix a typo that incorrectly made `setuptools_rust` a runtime dependency. ([\#19417](https://github.com/element-hq/synapse/issues/19417)) + +## Internal Changes + +- Prune stale entries from `sliding_sync_connection_required_state` table. ([\#19306](https://github.com/element-hq/synapse/issues/19306)) +- Update "Event Send Time Quantiles" graph to only use dots for the event persistence rate (Grafana dashboard). ([\#19399](https://github.com/element-hq/synapse/issues/19399)) +- Update and align Grafana dashboard to use regex matching for `job` selectors (`job=~"$job"`) so the "all" value works correctly across all panels. ([\#19400](https://github.com/element-hq/synapse/issues/19400)) +- Don't retry joining partial state rooms all at once on startup. ([\#19402](https://github.com/element-hq/synapse/issues/19402)) +- Disallow requests to the health endpoint from containing trailing path characters. ([\#19405](https://github.com/element-hq/synapse/issues/19405)) +- Add notes that new experimental features should have associated tracking issues. ([\#19410](https://github.com/element-hq/synapse/issues/19410)) +- Bump `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0. Contributed by @razvp @ ERCOM. ([\#19412](https://github.com/element-hq/synapse/issues/19412)) + + + + # Synapse 1.146.0 (2026-01-27) No significant changes since 1.146.0rc1. diff --git a/changelog.d/19306.misc b/changelog.d/19306.misc deleted file mode 100644 index 463f87eac3..0000000000 --- a/changelog.d/19306.misc +++ /dev/null @@ -1 +0,0 @@ -Prune stale entries from `sliding_sync_connection_required_state` table. 
diff --git a/changelog.d/19399.misc b/changelog.d/19399.misc deleted file mode 100644 index 0d02904f40..0000000000 --- a/changelog.d/19399.misc +++ /dev/null @@ -1 +0,0 @@ -Update "Event Send Time Quantiles" graph to only use dots for the event persistence rate (Grafana dashboard). diff --git a/changelog.d/19400.misc b/changelog.d/19400.misc deleted file mode 100644 index 33b0cb509c..0000000000 --- a/changelog.d/19400.misc +++ /dev/null @@ -1 +0,0 @@ -Update and align Grafana dashboard to use regex matching for `job` selectors (`job=~"$job"`) so the "all" value works correctly across all panels. diff --git a/changelog.d/19402.misc b/changelog.d/19402.misc deleted file mode 100644 index 0e1ee104a7..0000000000 --- a/changelog.d/19402.misc +++ /dev/null @@ -1 +0,0 @@ -Don't retry joining partial state rooms all at once on startup. diff --git a/changelog.d/19405.misc b/changelog.d/19405.misc deleted file mode 100644 index f3be5b2027..0000000000 --- a/changelog.d/19405.misc +++ /dev/null @@ -1 +0,0 @@ -Disallow requests to the health endpoint from containing trailing path characters. \ No newline at end of file diff --git a/changelog.d/19410.misc b/changelog.d/19410.misc deleted file mode 100644 index 97a4070304..0000000000 --- a/changelog.d/19410.misc +++ /dev/null @@ -1 +0,0 @@ -Add notes that new experimental features should have associated tracking issues. \ No newline at end of file diff --git a/changelog.d/19412.misc b/changelog.d/19412.misc deleted file mode 100644 index 6b811be799..0000000000 --- a/changelog.d/19412.misc +++ /dev/null @@ -1 +0,0 @@ -Bump `pyo3` from 0.26.0 to 0.27.2 and `pythonize` from 0.26.0 to 0.27.0. Contributed by @razvp @ ERCOM. \ No newline at end of file diff --git a/changelog.d/19416.bugfix b/changelog.d/19416.bugfix deleted file mode 100644 index f0c2872410..0000000000 --- a/changelog.d/19416.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix memory leak caused by not cleaning up stopped looping calls. Introduced in v1.140.0. 
diff --git a/changelog.d/19417.bugfix b/changelog.d/19417.bugfix deleted file mode 100644 index 9f5c9c02d9..0000000000 --- a/changelog.d/19417.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a typo that incorrectly made `setuptools_rust` a runtime dependency. diff --git a/debian/changelog b/debian/changelog index ac013ba1b8..8168bde3a2 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.147.0~rc1) stable; urgency=medium + + * New Synapse release 1.147.0rc1. + + -- Synapse Packaging team Tue, 03 Feb 2026 08:53:17 -0700 + matrix-synapse-py3 (1.146.0) stable; urgency=medium * New Synapse release 1.146.0. diff --git a/pyproject.toml b/pyproject.toml index d61f7177bd..a26247ab1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "matrix-synapse" -version = "1.146.0" +version = "1.147.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" readme = "README.rst" authors = [ diff --git a/schema/synapse-config.schema.yaml b/schema/synapse-config.schema.yaml index 3ed7196752..99c9b6e9bd 100644 --- a/schema/synapse-config.schema.yaml +++ b/schema/synapse-config.schema.yaml @@ -1,5 +1,5 @@ $schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json -$id: https://element-hq.github.io/synapse/schema/synapse/v1.146/synapse-config.schema.json +$id: https://element-hq.github.io/synapse/schema/synapse/v1.147/synapse-config.schema.json type: object properties: modules: From d423ab0cd5c160b6f95e541e7be42309f49631d7 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 10 Feb 2026 13:40:56 +0100 Subject: [PATCH 15/19] 1.147.0 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 9c688402ef..9b683bfc69 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# synapse 1.147.0 (2026-02-10) + +No significant changes since 1.147.0rc1. 
+ + + + # Synapse 1.147.0rc1 (2026-02-03) ## Bugfixes diff --git a/debian/changelog b/debian/changelog index 8168bde3a2..36a2921552 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.147.0) stable; urgency=medium + + * New synapse release 1.147.0. + + -- Synapse Packaging team Tue, 10 Feb 2026 12:39:58 +0000 + matrix-synapse-py3 (1.147.0~rc1) stable; urgency=medium * New Synapse release 1.147.0rc1. diff --git a/pyproject.toml b/pyproject.toml index a26247ab1e..df10bdf19b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "matrix-synapse" -version = "1.147.0rc1" +version = "1.147.0" description = "Homeserver for the Matrix decentralised comms protocol" readme = "README.rst" authors = [ From a6152cec043edcc1be7647ee95ca5324f8c43780 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 10 Feb 2026 13:41:53 +0100 Subject: [PATCH 16/19] Fixup changelog --- CHANGES.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 9b683bfc69..2aac866df8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,10 +1,7 @@ -# synapse 1.147.0 (2026-02-10) +# Synapse 1.147.0 (2026-02-10) No significant changes since 1.147.0rc1. 
- - - # Synapse 1.147.0rc1 (2026-02-03) ## Bugfixes From be362429deac94901c2c2b4b5d86ee68adad410a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 12 Feb 2026 16:39:59 +0100 Subject: [PATCH 17/19] Refuse requests and events signed by banned signing keys (#19459) Co-authored-by: Devon Hudson --- changelog.d/19459.misc | 1 + synapse/crypto/keyring.py | 23 ++++++++ tests/crypto/test_keyring.py | 47 +++++++++++++++- tests/federation/test_federation_base.py | 68 ++++++++++++++++++++++++ 4 files changed, 138 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19459.misc create mode 100644 tests/federation/test_federation_base.py diff --git a/changelog.d/19459.misc b/changelog.d/19459.misc new file mode 100644 index 0000000000..9075af91ef --- /dev/null +++ b/changelog.d/19459.misc @@ -0,0 +1 @@ +Block federation requests and events authenticated using a known insecure signing key. See [CVE-2026-24044](https://www.cve.org/CVERecord?id=CVE-2026-24044) / [ELEMENTSEC-2025-1670](https://github.com/element-hq/ess-helm/security/advisories/GHSA-qwcj-h6m8-vp6q). diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 883f682e77..0d4d5e0e17 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -22,6 +22,7 @@ import abc import logging from contextlib import ExitStack +from http import HTTPStatus from typing import TYPE_CHECKING, Callable, Iterable import attr @@ -60,6 +61,15 @@ logger = logging.getLogger(__name__) +# List of Unpadded Base64 server signing keys that are known to be vulnerable to attack. +# Incoming requests from homeservers using any of these keys should be refused. +# Events containing signatures using any of these keys should be refused. 
+BANNED_SERVER_SIGNING_KEYS = ( + # ELEMENTSEC-2025-1670 + "l/O9hxMVKB6Lg+3Hqf0FQQZhVESQcMzbPN1Cz2nM3og=", +) + + @attr.s(slots=True, frozen=True, cmp=False, auto_attribs=True) class VerifyJsonRequest: """ @@ -349,6 +359,19 @@ async def process_request(self, verify_request: VerifyJsonRequest) -> None: if key_result.valid_until_ts < verify_request.minimum_valid_until_ts: continue + key = encode_verify_key_base64(key_result.verify_key) + if key in BANNED_SERVER_SIGNING_KEYS: + raise SynapseError( + HTTPStatus.UNAUTHORIZED, + "Server signing key %s:%s for server %s has been banned by this server" + % ( + key_result.verify_key.alg, + key_result.verify_key.version, + verify_request.server_name, + ), + Codes.UNAUTHORIZED, + ) + await self.process_json(key_result.verify_key, verify_request) verified = True diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 3cc905f699..6bc935f272 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -20,7 +20,7 @@ # import time from typing import Any, cast -from unittest.mock import Mock +from unittest.mock import Mock, patch import attr import canonicaljson @@ -238,6 +238,51 @@ def test_verify_json_for_server(self) -> None: # self.assertFalse(d.called) self.get_success(d) + def test_verify_json_for_server_using_banned_key(self) -> None: + """Ensure that JSON signed using a banned server_signing_key fails verification.""" + kr = keyring.Keyring(self.hs) + + banned_signing_key = signedjson.key.generate_signing_key("1") + r = self.hs.get_datastores().main.store_server_keys_response( + "server9", + from_server="test", + ts_added_ms=int(time.time() * 1000), + verify_keys={ + get_key_id(banned_signing_key): FetchKeyResult( + verify_key=get_verify_key(banned_signing_key), valid_until_ts=1000 + ) + }, + # The entire response gets signed & stored, just include the bits we + # care about. 
+ response_json={ + "verify_keys": { + get_key_id(banned_signing_key): { + "key": encode_verify_key_base64( + get_verify_key(banned_signing_key) + ) + } + } + }, + ) + self.get_success(r) + + json1: JsonDict = {} + signedjson.sign.sign_json(json1, "server9", banned_signing_key) + + # Ensure the signatures check out normally + d = kr.verify_json_for_server("server9", json1, 500) + self.get_success(d) + + # Patch the list of banned signing keys and ensure the signature check fails + with patch.object( + keyring, + "BANNED_SERVER_SIGNING_KEYS", + (encode_verify_key_base64(get_verify_key(banned_signing_key))), + ): + # should fail on a signed object signed by the banned key + d = kr.verify_json_for_server("server9", json1, 500) + self.get_failure(d, SynapseError) + def test_verify_for_local_server(self) -> None: """Ensure that locally signed JSON can be verified without fetching keys over federation diff --git a/tests/federation/test_federation_base.py b/tests/federation/test_federation_base.py new file mode 100644 index 0000000000..1bc1da1feb --- /dev/null +++ b/tests/federation/test_federation_base.py @@ -0,0 +1,68 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2026 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+#
+#
+
+
+from unittest.mock import patch
+
+from signedjson.key import encode_verify_key_base64, get_verify_key
+
+from synapse.crypto import keyring
+from synapse.crypto.event_signing import add_hashes_and_signatures
+from synapse.events import make_event_from_dict
+from synapse.federation.federation_base import InvalidEventSignatureError
+
+from tests import unittest
+
+
+class FederationBaseTestCase(unittest.HomeserverTestCase):
+    def test_events_signed_by_banned_key_are_refused(self) -> None:
+        """Ensure that event JSON signed using a banned server_signing_key fails verification."""
+        event_dict = {
+            "content": {"body": "Here is the message content"},
+            "event_id": "$0:domain",
+            "origin_server_ts": 1000000,
+            "type": "m.room.message",
+            "room_id": "!r:domain",
+            "sender": f"@u:{self.hs.config.server.server_name}",
+            "signatures": {},
+            "unsigned": {"age_ts": 1000000},
+        }
+
+        add_hashes_and_signatures(
+            self.hs.config.server.default_room_version,
+            event_dict,
+            self.hs.config.server.server_name,
+            self.hs.signing_key,
+        )
+        event = make_event_from_dict(event_dict)
+        fs = self.hs.get_federation_server()
+
+        # Ensure the signatures check out normally
+        self.get_success(
+            fs._check_sigs_and_hash(self.hs.config.server.default_room_version, event)
+        )
+
+        # Patch the list of banned signing keys and ensure the signature check fails
+        with patch.object(
+            keyring,
+            "BANNED_SERVER_SIGNING_KEYS",
+            (encode_verify_key_base64(get_verify_key(self.hs.signing_key)),),
+        ):
+            self.get_failure(
+                fs._check_sigs_and_hash(
+                    self.hs.config.server.default_room_version, event
+                ),
+                InvalidEventSignatureError,
+            )

From 3fdeba35a6ed27e6d77bea9895863baef3d43c5d Mon Sep 17 00:00:00 2001
From: Quentin Gliech
Date: Thu, 12 Feb 2026 16:46:02 +0100
Subject: [PATCH 18/19] 1.147.1

---
 CHANGES.md             | 9 +++++++++
 changelog.d/19459.misc | 1 -
 debian/changelog       | 6 ++++++
 pyproject.toml         | 2 +-
 4 files changed, 16 insertions(+), 2 deletions(-)
 delete mode 100644 changelog.d/19459.misc

diff
--git a/CHANGES.md b/CHANGES.md
index 2aac866df8..e7d27469da 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,12 @@
+# synapse 1.147.1 (2026-02-12)
+
+## Internal Changes
+
+- Block federation requests and events authenticated using a known insecure signing key. See [CVE-2026-24044](https://www.cve.org/CVERecord?id=CVE-2026-24044) / [ELEMENTSEC-2025-1670](https://github.com/element-hq/ess-helm/security/advisories/GHSA-qwcj-h6m8-vp6q). ([\#19459](https://github.com/element-hq/synapse/issues/19459))
+
+
+
+
 # Synapse 1.147.0 (2026-02-10)
 
 No significant changes since 1.147.0rc1.
diff --git a/changelog.d/19459.misc b/changelog.d/19459.misc
deleted file mode 100644
index 9075af91ef..0000000000
--- a/changelog.d/19459.misc
+++ /dev/null
@@ -1 +0,0 @@
-Block federation requests and events authenticated using a known insecure signing key. See [CVE-2026-24044](https://www.cve.org/CVERecord?id=CVE-2026-24044) / [ELEMENTSEC-2025-1670](https://github.com/element-hq/ess-helm/security/advisories/GHSA-qwcj-h6m8-vp6q).
diff --git a/debian/changelog b/debian/changelog
index 36a2921552..a6852dac5e 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.147.1) stable; urgency=medium
+
+  * New synapse release 1.147.1.
+
+ -- Synapse Packaging team  Thu, 12 Feb 2026 15:45:15 +0000
+
 matrix-synapse-py3 (1.147.0) stable; urgency=medium
 
   * New synapse release 1.147.0.
diff --git a/pyproject.toml b/pyproject.toml
index df10bdf19b..8073f8ec44 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "matrix-synapse"
-version = "1.147.0"
+version = "1.147.1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 readme = "README.rst"
 authors = [

From 7ff86876539581856ee2531de94a29fb61bbcc57 Mon Sep 17 00:00:00 2001
From: Quentin Gliech
Date: Thu, 12 Feb 2026 16:50:45 +0100
Subject: [PATCH 19/19] Fixup changelog

---
 CHANGES.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGES.md b/CHANGES.md
index e7d27469da..fe27ccb040 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,4 +1,4 @@
-# synapse 1.147.1 (2026-02-12)
+# Synapse 1.147.1 (2026-02-12)
 
 ## Internal Changes
 