
Commit 859dc05

Rebuild other indexes using stream_ordering (#10282)

We need to rebuild *all* of the indexes that use the current `stream_ordering` column.

1 parent e6f5b93 · commit 859dc05
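As background for rebuilding *all* of the indexes that reference `stream_ordering`: on Postgres the affected indexes can be enumerated from the catalog. A minimal sketch, assuming direct read access to the Synapse database (illustrative only, not part of the commit):

    -- Indexes on the events table whose definition mentions stream_ordering.
    SELECT indexname, indexdef
      FROM pg_indexes
     WHERE tablename = 'events'
       AND indexdef LIKE '%stream_ordering%';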

3 files changed: +56 −6 lines changed


changelog.d/10282.bugfix

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+Fix a long-standing bug where Synapse would return errors after 2^31 events were handled by the server.
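The 2^31 limit comes from `stream_ordering` having been a 32-bit integer column; the background updates below replace it with a BIGINT. As a rough before/after check, assuming psql access to the Synapse database (illustrative, not part of the commit), the column type can be confirmed directly:

    -- Reports integer before the migration and bigint once it has completed.
    SELECT data_type
      FROM information_schema.columns
     WHERE table_name = 'events'
       AND column_name = 'stream_ordering';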

synapse/storage/databases/main/events_bg_updates.py

Lines changed: 47 additions & 3 deletions
@@ -29,13 +29,18 @@
 logger = logging.getLogger(__name__)


-_REPLACE_STREAM_ORDRING_SQL_COMMANDS = (
+_REPLACE_STREAM_ORDERING_SQL_COMMANDS = (
     # there should be no leftover rows without a stream_ordering2, but just in case...
     "UPDATE events SET stream_ordering2 = stream_ordering WHERE stream_ordering2 IS NULL",
-    # finally, we can drop the rule and switch the columns
+    # now we can drop the rule and switch the columns
     "DROP RULE populate_stream_ordering2 ON events",
     "ALTER TABLE events DROP COLUMN stream_ordering",
     "ALTER TABLE events RENAME COLUMN stream_ordering2 TO stream_ordering",
+    # ... and finally, rename the indexes into place for consistency with sqlite
+    "ALTER INDEX event_contains_url_index2 RENAME TO event_contains_url_index",
+    "ALTER INDEX events_order_room2 RENAME TO events_order_room",
+    "ALTER INDEX events_room_stream2 RENAME TO events_room_stream",
+    "ALTER INDEX events_ts2 RENAME TO events_ts",
 )


@@ -45,6 +50,10 @@ class _BackgroundUpdates:
     DELETE_SOFT_FAILED_EXTREMITIES = "delete_soft_failed_extremities"
     POPULATE_STREAM_ORDERING2 = "populate_stream_ordering2"
     INDEX_STREAM_ORDERING2 = "index_stream_ordering2"
+    INDEX_STREAM_ORDERING2_CONTAINS_URL = "index_stream_ordering2_contains_url"
+    INDEX_STREAM_ORDERING2_ROOM_ORDER = "index_stream_ordering2_room_order"
+    INDEX_STREAM_ORDERING2_ROOM_STREAM = "index_stream_ordering2_room_stream"
+    INDEX_STREAM_ORDERING2_TS = "index_stream_ordering2_ts"
     REPLACE_STREAM_ORDERING_COLUMN = "replace_stream_ordering_column"


@@ -155,24 +164,59 @@ def __init__(self, database: DatabasePool, db_conn, hs):
             self._purged_chain_cover_index,
         )

+        ################################################################################
+
         # bg updates for replacing stream_ordering with a BIGINT
         # (these only run on postgres.)
+
         self.db_pool.updates.register_background_update_handler(
             _BackgroundUpdates.POPULATE_STREAM_ORDERING2,
             self._background_populate_stream_ordering2,
         )
+        # CREATE UNIQUE INDEX events_stream_ordering ON events(stream_ordering2);
         self.db_pool.updates.register_background_index_update(
             _BackgroundUpdates.INDEX_STREAM_ORDERING2,
             index_name="events_stream_ordering",
             table="events",
             columns=["stream_ordering2"],
             unique=True,
         )
+        # CREATE INDEX event_contains_url_index ON events(room_id, topological_ordering, stream_ordering) WHERE contains_url = true AND outlier = false;
+        self.db_pool.updates.register_background_index_update(
+            _BackgroundUpdates.INDEX_STREAM_ORDERING2_CONTAINS_URL,
+            index_name="event_contains_url_index2",
+            table="events",
+            columns=["room_id", "topological_ordering", "stream_ordering2"],
+            where_clause="contains_url = true AND outlier = false",
+        )
+        # CREATE INDEX events_order_room ON events(room_id, topological_ordering, stream_ordering);
+        self.db_pool.updates.register_background_index_update(
+            _BackgroundUpdates.INDEX_STREAM_ORDERING2_ROOM_ORDER,
+            index_name="events_order_room2",
+            table="events",
+            columns=["room_id", "topological_ordering", "stream_ordering2"],
+        )
+        # CREATE INDEX events_room_stream ON events(room_id, stream_ordering);
+        self.db_pool.updates.register_background_index_update(
+            _BackgroundUpdates.INDEX_STREAM_ORDERING2_ROOM_STREAM,
+            index_name="events_room_stream2",
+            table="events",
+            columns=["room_id", "stream_ordering2"],
+        )
+        # CREATE INDEX events_ts ON events(origin_server_ts, stream_ordering);
+        self.db_pool.updates.register_background_index_update(
+            _BackgroundUpdates.INDEX_STREAM_ORDERING2_TS,
+            index_name="events_ts2",
+            table="events",
+            columns=["origin_server_ts", "stream_ordering2"],
+        )
         self.db_pool.updates.register_background_update_handler(
             _BackgroundUpdates.REPLACE_STREAM_ORDERING_COLUMN,
             self._background_replace_stream_ordering_column,
         )

+        ################################################################################
+
     async def _background_reindex_fields_sender(self, progress, batch_size):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
         max_stream_id = progress["max_stream_id_exclusive"]
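Each `register_background_index_update` call above corresponds to one of the commented `CREATE INDEX` statements, but built against `stream_ordering2` so that the final step can rename the index into place. As a rough sketch of the first new index (the background updater drives the actual build itself, so this is illustrative only and not something to run by hand on a live server):

    -- Roughly what INDEX_STREAM_ORDERING2_CONTAINS_URL builds on Postgres.
    CREATE INDEX CONCURRENTLY event_contains_url_index2
        ON events (room_id, topological_ordering, stream_ordering2)
        WHERE contains_url = true AND outlier = false;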
@@ -1098,7 +1142,7 @@ async def _background_replace_stream_ordering_column(
         """Drop the old 'stream_ordering' column and rename 'stream_ordering2' into its place."""

         def process(txn: Cursor) -> None:
-            for sql in _REPLACE_STREAM_ORDRING_SQL_COMMANDS:
+            for sql in _REPLACE_STREAM_ORDERING_SQL_COMMANDS:
                 logger.info("completing stream_ordering migration: %s", sql)
                 txn.execute(sql)

synapse/storage/schema/main/delta/60/01recreate_stream_ordering.sql.postgres

Lines changed: 8 additions & 3 deletions
@@ -31,10 +31,15 @@ CREATE OR REPLACE RULE "populate_stream_ordering2" AS
 INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
   (6001, 'populate_stream_ordering2', '{}');

--- ... and another to build an index on it
+-- ... and some more to build indexes on it. These aren't really interdependent
+-- but the background_updates manager can only handle a single dependency per update.
 INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
-  (6001, 'index_stream_ordering2', '{}', 'populate_stream_ordering2');
+  (6001, 'index_stream_ordering2', '{}', 'populate_stream_ordering2'),
+  (6001, 'index_stream_ordering2_room_order', '{}', 'index_stream_ordering2'),
+  (6001, 'index_stream_ordering2_contains_url', '{}', 'index_stream_ordering2_room_order'),
+  (6001, 'index_stream_ordering2_room_stream', '{}', 'index_stream_ordering2_contains_url'),
+  (6001, 'index_stream_ordering2_ts', '{}', 'index_stream_ordering2_room_stream');

 -- ... and another to do the switcheroo
 INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
-  (6001, 'replace_stream_ordering_column', '{}', 'index_stream_ordering2');
+  (6003, 'replace_stream_ordering_column', '{}', 'index_stream_ordering2_ts');
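Since the background updates manager only supports a single `depends_on` per update, the otherwise independent index builds are chained linearly, ending with `replace_stream_ordering_column`. While the migration is pending, the chain can be inspected on a running server; a minimal sketch, assuming psql access to the Synapse database:

    -- Pending background updates and the dependency chain between them.
    SELECT ordering, update_name, depends_on
      FROM background_updates
     ORDER BY ordering, update_name;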
