Don't needlessly batch in `add_event_to_cache` (#10784)

The events have already been batched up earlier, and other places in
events.py assume that they have been. Removing the extra batching here makes
it easier to adjust the batch sizes in one place.
Erik Johnston 2021-09-10 10:16:52 +01:00 committed by GitHub
parent 273b6861f2
commit 7f0565e029
2 changed files with 24 additions and 26 deletions
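For illustration, a minimal sketch of the pattern the change removes, using hypothetical stand-ins (FakeEvent, old_style, and new_style are not Synapse code): the old version re-chunked the already-batched events_and_contexts list into groups of 200 before building its event map, while the new version builds a single map and handles the whole batch in one pass.

# Illustrative sketch only -- hypothetical helpers, not the actual Synapse code.
# The caller already passes a bounded batch, so re-chunking it inside the
# cache-prefill step is redundant; one map over the whole batch is enough.

from typing import Any, List, Tuple


class FakeEvent:
    def __init__(self, event_id: str) -> None:
        self.event_id = event_id


def old_style(events_and_contexts: List[Tuple[FakeEvent, Any]]) -> List[str]:
    """Re-chunks the (already batched) input into groups of 200."""
    seen: List[str] = []
    N = 200
    for i in range(0, len(events_and_contexts), N):
        ev_map = {e[0].event_id: e[0] for e in events_and_contexts[i : i + N]}
        if not ev_map:
            break
        seen.extend(ev_map)  # stand-in for the per-chunk SELECT
    return seen


def new_style(events_and_contexts: List[Tuple[FakeEvent, Any]]) -> List[str]:
    """Builds one map for the whole batch and does a single pass."""
    ev_map = {e.event_id: e for e, _ in events_and_contexts}
    if not ev_map:
        return []
    return list(ev_map)  # stand-in for one SELECT over all event IDs


if __name__ == "__main__":
    batch = [(FakeEvent(f"$event{i}:example.org"), None) for i in range(5)]
    assert old_style(batch) == new_style(batch)

Both paths visit the same event IDs; the change simply drops the redundant chunking loop so that batch sizing is controlled in one place by the caller.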

changelog.d/10784.misc (new file)

@@ -0,0 +1 @@
+Minor speed ups when joining large rooms over federation.

synapse/storage/databases/main/events.py

@@ -1547,11 +1547,10 @@ class PersistEventsStore:
         to_prefill = []
 
         rows = []
-        N = 200
-        for i in range(0, len(events_and_contexts), N):
-            ev_map = {e[0].event_id: e[0] for e in events_and_contexts[i : i + N]}
-            if not ev_map:
-                break
+
+        ev_map = {e.event_id: e for e, _ in events_and_contexts}
+        if not ev_map:
+            return
 
         sql = (
             "SELECT "
@@ -1573,9 +1572,7 @@ class PersistEventsStore:
         for row in rows:
             event = ev_map[row["event_id"]]
             if not row["rejects"] and not row["redacts"]:
-                to_prefill.append(
-                    _EventCacheEntry(event=event, redacted_event=None)
-                )
+                to_prefill.append(_EventCacheEntry(event=event, redacted_event=None))
 
         def prefill():
             for cache_entry in to_prefill: