     LoggingDatabaseConnection,
     LoggingTransaction,
 )
-from synapse.storage.engines import PostgresEngine
 from synapse.storage.types import Cursor
 from synapse.storage.util.id_generators import (
     AbstractStreamIdGenerator,
     MultiWriterIdGenerator,
-    StreamIdGenerator,
 )
 from synapse.storage.util.sequence import build_sequence_generator
 from synapse.types import JsonDict, get_domain_from_id
@@ -195,51 +193,28 @@ def __init__(
 
         self._stream_id_gen: AbstractStreamIdGenerator
         self._backfill_id_gen: AbstractStreamIdGenerator
-        if isinstance(database.engine, PostgresEngine):
-            # If we're using Postgres then we can use `MultiWriterIdGenerator`
-            # regardless of whether this process writes to the streams or not.
-            self._stream_id_gen = MultiWriterIdGenerator(
-                db_conn=db_conn,
-                db=database,
-                notifier=hs.get_replication_notifier(),
-                stream_name="events",
-                instance_name=hs.get_instance_name(),
-                tables=[("events", "instance_name", "stream_ordering")],
-                sequence_name="events_stream_seq",
-                writers=hs.config.worker.writers.events,
-            )
-            self._backfill_id_gen = MultiWriterIdGenerator(
-                db_conn=db_conn,
-                db=database,
-                notifier=hs.get_replication_notifier(),
-                stream_name="backfill",
-                instance_name=hs.get_instance_name(),
-                tables=[("events", "instance_name", "stream_ordering")],
-                sequence_name="events_backfill_stream_seq",
-                positive=False,
-                writers=hs.config.worker.writers.events,
-            )
-        else:
-            # Multiple writers are not supported for SQLite.
-            #
-            # We shouldn't be running in worker mode with SQLite, but it's useful
-            # to support it for unit tests.
-            self._stream_id_gen = StreamIdGenerator(
-                db_conn,
-                hs.get_replication_notifier(),
-                "events",
-                "stream_ordering",
-                is_writer=hs.get_instance_name() in hs.config.worker.writers.events,
-            )
-            self._backfill_id_gen = StreamIdGenerator(
-                db_conn,
-                hs.get_replication_notifier(),
-                "events",
-                "stream_ordering",
-                step=-1,
-                extra_tables=[("ex_outlier_stream", "event_stream_ordering")],
-                is_writer=hs.get_instance_name() in hs.config.worker.writers.events,
-            )
+
+        self._stream_id_gen = MultiWriterIdGenerator(
+            db_conn=db_conn,
+            db=database,
+            notifier=hs.get_replication_notifier(),
+            stream_name="events",
+            instance_name=hs.get_instance_name(),
+            tables=[("events", "instance_name", "stream_ordering")],
+            sequence_name="events_stream_seq",
+            writers=hs.config.worker.writers.events,
+        )
+        self._backfill_id_gen = MultiWriterIdGenerator(
+            db_conn=db_conn,
+            db=database,
+            notifier=hs.get_replication_notifier(),
+            stream_name="backfill",
+            instance_name=hs.get_instance_name(),
+            tables=[("events", "instance_name", "stream_ordering")],
+            sequence_name="events_backfill_stream_seq",
+            positive=False,
+            writers=hs.config.worker.writers.events,
+        )
 
         events_max = self._stream_id_gen.get_current_token()
         curr_state_delta_prefill, min_curr_state_delta_id = self.db_pool.get_cache_dict(
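Note on the `positive=False` argument above: the backfill generator hands out descending negative stream IDs so that backfilled history sorts before live traffic, while the events generator counts upward. A minimal sketch of that idea, using a toy allocator (`ToyStreamIdAllocator` is a made-up name for illustration, not Synapse's `MultiWriterIdGenerator`):

import itertools
import threading


class ToyStreamIdAllocator:
    """Allocates stream IDs in a fixed direction, thread-safely."""

    def __init__(self, start: int, step: int) -> None:
        assert step in (1, -1)
        self._counter = itertools.count(start, step)
        self._lock = threading.Lock()

    def next_id(self) -> int:
        with self._lock:
            return next(self._counter)


events_gen = ToyStreamIdAllocator(start=1, step=1)      # like the "events" stream
backfill_gen = ToyStreamIdAllocator(start=-1, step=-1)  # like positive=False

print([events_gen.next_id() for _ in range(3)])    # [1, 2, 3]
print([backfill_gen.next_id() for _ in range(3)])  # [-1, -2, -3]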
@@ -309,27 +284,17 @@ def get_chain_id_txn(txn: Cursor) -> int:
 
         self._un_partial_stated_events_stream_id_gen: AbstractStreamIdGenerator
 
-        if isinstance(database.engine, PostgresEngine):
-            self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator(
-                db_conn=db_conn,
-                db=database,
-                notifier=hs.get_replication_notifier(),
-                stream_name="un_partial_stated_event_stream",
-                instance_name=hs.get_instance_name(),
-                tables=[
-                    ("un_partial_stated_event_stream", "instance_name", "stream_id")
-                ],
-                sequence_name="un_partial_stated_event_stream_sequence",
-                # TODO(faster_joins, multiple writers) Support multiple writers.
-                writers=["master"],
-            )
-        else:
-            self._un_partial_stated_events_stream_id_gen = StreamIdGenerator(
-                db_conn,
-                hs.get_replication_notifier(),
-                "un_partial_stated_event_stream",
-                "stream_id",
-            )
+        self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator(
+            db_conn=db_conn,
+            db=database,
+            notifier=hs.get_replication_notifier(),
+            stream_name="un_partial_stated_event_stream",
+            instance_name=hs.get_instance_name(),
+            tables=[("un_partial_stated_event_stream", "instance_name", "stream_id")],
+            sequence_name="un_partial_stated_event_stream_sequence",
+            # TODO(faster_joins, multiple writers) Support multiple writers.
+            writers=["master"],
+        )
 
     def get_un_partial_stated_events_token(self, instance_name: str) -> int:
         return (
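For context on the multi-writer token model the diff standardises on: as I understand it, the safe "current token" across several writers is the lowest position that every writer has fully persisted, so readers never skip past an in-flight stream ID (with writers=["master"] there is only one writer today, per the TODO above). A minimal sketch of that idea, not Synapse's API, with made-up writer names:

from typing import Dict


def safe_current_token(writer_positions: Dict[str, int]) -> int:
    """Return the highest stream ID that every writer has fully persisted."""
    if not writer_positions:
        return 0
    return min(writer_positions.values())


positions = {"event_persister1": 105, "event_persister2": 103}
assert safe_current_token(positions) == 103  # readers may only advance to 103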