Add sender and contains_url fields to events table
commit b64aa6d687
parent a37ee2293c
@@ -152,6 +152,7 @@ _EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))
 class EventsStore(SQLBaseStore):
     EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
+    EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
 
     def __init__(self, hs):
         super(EventsStore, self).__init__(hs)
@@ -159,6 +160,10 @@ class EventsStore(SQLBaseStore):
         self.register_background_update_handler(
             self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
         )
+        self.register_background_update_handler(
+            self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
+            self._background_reindex_fields_sender,
+        )
 
         self._event_persist_queue = _EventPeristenceQueue()
 
@@ -576,6 +581,11 @@ class EventsStore(SQLBaseStore):
                     "content": encode_json(event.content).decode("UTF-8"),
                     "origin_server_ts": int(event.origin_server_ts),
                     "received_ts": self._clock.time_msec(),
+                    "sender": event.sender,
+                    "contains_url": (
+                        "url" in event.content
+                        and isinstance(event.content["url"], basestring)
+                    ),
                 }
                 for event, _ in events_and_contexts
             ],
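The "contains_url" value added above is computed once at insert time rather than re-derived from the event JSON on every query. A minimal sketch of how that boolean evaluates for a few illustrative event contents (not part of the commit; it uses str where the Python 2 code above uses basestring):

def event_contains_url(content):
    # Mirrors the expression stored in the contains_url column above:
    # true only when "url" is present and its value is a string.
    return "url" in content and isinstance(content["url"], str)

print(event_contains_url({"msgtype": "m.text", "body": "hello"}))                  # False
print(event_contains_url({"msgtype": "m.image", "url": "mxc://example.org/abc"}))  # True
print(event_contains_url({"url": 12345}))                                          # False: url is not a string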
@@ -1115,6 +1125,78 @@ class EventsStore(SQLBaseStore):
         ret = yield self.runInteraction("count_messages", _count_messages)
         defer.returnValue(ret)
 
+    @defer.inlineCallbacks
+    def _background_reindex_fields_sender(self, progress, batch_size):
+        target_min_stream_id = progress["target_min_stream_id_inclusive"]
+        max_stream_id = progress["max_stream_id_exclusive"]
+        rows_inserted = progress.get("rows_inserted", 0)
+
+        INSERT_CLUMP_SIZE = 1000
+
+        def reindex_txn(txn):
+            sql = (
+                "SELECT stream_ordering, event_id, json FROM events"
+                " INNER JOIN event_json USING (event_id)"
+                " WHERE ? <= stream_ordering AND stream_ordering < ?"
+                " ORDER BY stream_ordering DESC"
+                " LIMIT ?"
+            )
+
+            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+
+            rows = txn.fetchall()
+            if not rows:
+                return 0
+
+            min_stream_id = rows[-1][0]
+
+            update_rows = []
+            for row in rows:
+                try:
+                    event_id = row[1]
+                    event_json = json.loads(row[2])
+                    sender = event_json["sender"]
+                    content = event_json["content"]
+
+                    contains_url = "url" in content
+                    if contains_url:
+                        contains_url &= isinstance(content["url"], basestring)
+                except (KeyError, AttributeError):
+                    # If the event is missing a necessary field then
+                    # skip over it.
+                    continue
+
+                update_rows.append((sender, contains_url, event_id))
+
+            sql = (
+                "UPDATE events SET sender = ?, contains_url = ? WHERE event_id = ?"
+            )
+
+            for index in range(0, len(update_rows), INSERT_CLUMP_SIZE):
+                clump = update_rows[index:index + INSERT_CLUMP_SIZE]
+                txn.executemany(sql, clump)
+
+            progress = {
+                "target_min_stream_id_inclusive": target_min_stream_id,
+                "max_stream_id_exclusive": min_stream_id,
+                "rows_inserted": rows_inserted + len(rows)
+            }
+
+            self._background_update_progress_txn(
+                txn, self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, progress
+            )
+
+            return len(rows)
+
+        result = yield self.runInteraction(
+            self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, reindex_txn
+        )
+
+        if not result:
+            yield self._end_background_update(self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME)
+
+        defer.returnValue(result)
+
     @defer.inlineCallbacks
     def _background_reindex_origin_server_ts(self, progress, batch_size):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
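The handler added above walks the events table from the newest stream ordering down to the oldest, one batch per call, re-persisting its progress after each batch; once a call finds no rows it de-registers itself via _end_background_update. A rough, self-contained sketch of how the progress window shrinks (illustrative only, with made-up stream orderings; the real scheduling lives in Synapse's background-updates machinery, not in this commit):

def simulate_reindex_progress(min_stream_id, max_stream_id, batch_size):
    # Same shape as the progress dict seeded by the schema delta below.
    progress = {
        "target_min_stream_id_inclusive": min_stream_id,
        "max_stream_id_exclusive": max_stream_id + 1,
        "rows_inserted": 0,
    }
    while True:
        lo = progress["target_min_stream_id_inclusive"]
        hi = progress["max_stream_id_exclusive"]
        # The window the SQL above selects: newest first, LIMIT batch_size.
        batch = list(range(hi - 1, lo - 1, -1))[:batch_size]
        if not batch:
            # The real handler returns 0 here and the background update ends.
            break
        # max_stream_id_exclusive moves down to the oldest row just handled.
        progress["max_stream_id_exclusive"] = batch[-1]
        progress["rows_inserted"] += len(batch)
    return progress

print(simulate_reindex_progress(1, 10, batch_size=4))
# {'target_min_stream_id_inclusive': 1, 'max_stream_id_exclusive': 1, 'rows_inserted': 10}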
@@ -0,0 +1,60 @@
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.storage.prepare_database import get_statements
+
+import logging
+import ujson
+
+logger = logging.getLogger(__name__)
+
+
+ALTER_TABLE = """
+ALTER TABLE events ADD COLUMN sender TEXT;
+ALTER TABLE events ADD COLUMN contains_url BOOLEAN;
+"""
+
+
+def run_create(cur, database_engine, *args, **kwargs):
+    for statement in get_statements(ALTER_TABLE.splitlines()):
+        cur.execute(statement)
+
+    cur.execute("SELECT MIN(stream_ordering) FROM events")
+    rows = cur.fetchall()
+    min_stream_id = rows[0][0]
+
+    cur.execute("SELECT MAX(stream_ordering) FROM events")
+    rows = cur.fetchall()
+    max_stream_id = rows[0][0]
+
+    if min_stream_id is not None and max_stream_id is not None:
+        progress = {
+            "target_min_stream_id_inclusive": min_stream_id,
+            "max_stream_id_exclusive": max_stream_id + 1,
+            "rows_inserted": 0,
+        }
+        progress_json = ujson.dumps(progress)
+
+        sql = (
+            "INSERT into background_updates (update_name, progress_json)"
+            " VALUES (?, ?)"
+        )
+
+        sql = database_engine.convert_param_style(sql)
+
+        cur.execute(sql, ("event_fields_sender_url", progress_json))
+
+
+def run_upgrade(cur, database_engine, *args, **kwargs):
+    pass
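In this new schema delta, run_create adds the two nullable columns and then seeds a background_updates row spanning every existing stream ordering, so the reindex handler above backfills events written before the columns existed. Illustrative only (the stream orderings are made up, and stdlib json stands in for ujson here): for a database whose events span stream orderings 1 through 5000, the seeded row would look roughly like this:

import json

progress = {
    "target_min_stream_id_inclusive": 1,     # SELECT MIN(stream_ordering)
    "max_stream_id_exclusive": 5000 + 1,     # SELECT MAX(stream_ordering) + 1
    "rows_inserted": 0,
}
# (update_name, progress_json) as bound into the INSERT above.
print(("event_fields_sender_url", json.dumps(progress)))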