Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters`. (#15953)

* Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters` (#15787)"

This reverts commit f25b0f8808.

* newsfragment
Shay 2023-07-18 03:44:09 -07:00 committed by GitHub
parent 92014fbf72
commit e625c3dca0
10 changed files with 174 additions and 122 deletions

changelog.d/15953.misc (new file)

@@ -0,0 +1 @@
+Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters`".


@@ -98,8 +98,6 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
     "event_push_summary": "event_push_summary_unique_index2",
     "receipts_linearized": "receipts_linearized_unique_index",
     "receipts_graph": "receipts_graph_unique_index",
-    "profiles": "profiles_full_user_id_key_idx",
-    "user_filters": "full_users_filters_unique_idx",
 }
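With these entries gone, Synapse no longer registers the background updates that built unique indexes over `full_user_id`. A hedged sketch only (the real DDL lives in the forward schema deltas that this commit deletes): the de-registered index builds would have run statements roughly like these, with names matching the map values above.

# Hedged sketch, not code from this commit:
PROFILES_IDX_SQL = """
CREATE UNIQUE INDEX profiles_full_user_id_key_idx
    ON profiles (full_user_id)
"""
USER_FILTERS_IDX_SQL = """
CREATE UNIQUE INDEX full_users_filters_unique_idx
    ON user_filters (full_user_id, filter_id)
"""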


@@ -15,7 +15,7 @@
 # limitations under the License.

 import logging
-from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast
+from typing import TYPE_CHECKING, List, Optional, Tuple, cast

 from synapse.api.constants import Direction
 from synapse.config.homeserver import HomeServerConfig
@@ -196,7 +196,7 @@ class DataStore(
         txn: LoggingTransaction,
     ) -> Tuple[List[JsonDict], int]:
         filters = []
-        args: List[Union[str, int]] = []
+        args = [self.hs.config.server.server_name]

         # Set ordering
         order_by_column = UserSortOrder(order_by).value
@@ -263,7 +263,7 @@ class DataStore(
         sql_base = f"""
                 FROM users as u
-                LEFT JOIN profiles AS p ON u.name = p.full_user_id
+                LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
                 LEFT JOIN erased_users AS eu ON u.name = eu.user_id
                 {where_clause}
                 """


@@ -188,13 +188,14 @@ class FilteringWorkerStore(SQLBaseStore):
             filter_id = max_id + 1

             sql = (
-                "INSERT INTO user_filters (full_user_id, filter_id, filter_json)"
-                "VALUES(?, ?, ?)"
+                "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)"
+                "VALUES(?, ?, ?, ?)"
             )
             txn.execute(
                 sql,
                 (
                     user_id.to_string(),
+                    user_id.localpart,
                     filter_id,
                     bytearray(def_json),
                 ),
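Post-revert, every filter insert writes both the full ID and the localpart. For `@alice:example.com` storing an empty filter with ID 0, the bound parameter tuple would look like this (values are illustrative, not from the diff):

row = (
    "@alice:example.com",  # full_user_id
    "alice",               # user_id: the localpart, written again post-revert
    0,                     # filter_id
    bytearray(b"{}"),      # filter_json
)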


@@ -173,9 +173,10 @@ class ProfileWorkerStore(SQLBaseStore):
         )

     async def create_profile(self, user_id: UserID) -> None:
+        user_localpart = user_id.localpart
         await self.db_pool.simple_insert(
             table="profiles",
-            values={"full_user_id": user_id.to_string()},
+            values={"user_id": user_localpart, "full_user_id": user_id.to_string()},
             desc="create_profile",
         )
@@ -190,11 +191,13 @@ class ProfileWorkerStore(SQLBaseStore):
             new_displayname: The new display name. If this is None, the user's display
                 name is removed.
         """
+        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"full_user_id": user_id.to_string()},
+            keyvalues={"user_id": user_localpart},
             values={
                 "displayname": new_displayname,
+                "full_user_id": user_id.to_string(),
             },
             desc="set_profile_displayname",
         )
@@ -210,10 +213,11 @@ class ProfileWorkerStore(SQLBaseStore):
             new_avatar_url: The new avatar URL. If this is None, the user's avatar is
                 removed.
         """
+        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"full_user_id": user_id.to_string()},
-            values={"avatar_url": new_avatar_url},
+            keyvalues={"user_id": user_localpart},
+            values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()},
             desc="set_profile_avatar_url",
         )
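The reverted upserts key on the localpart again while still writing `full_user_id`, so both columns stay populated. Assuming `simple_upsert` compiles to a standard `INSERT ... ON CONFLICT` on the key column (an assumption; its generated SQL is internal to Synapse), the display-name write is roughly:

# Rough Postgres equivalent of the reverted simple_upsert (assumed, not
# the literal generated SQL); ON CONFLICT (user_id) relies on the table's
# UNIQUE (user_id) constraint.
UPSERT_DISPLAYNAME_SQL = """
INSERT INTO profiles (user_id, displayname, full_user_id)
VALUES (?, ?, ?)
ON CONFLICT (user_id) DO UPDATE
    SET displayname = EXCLUDED.displayname,
        full_user_id = EXCLUDED.full_user_id
"""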


@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-SCHEMA_VERSION = 79  # remember to update the list below when updating
+SCHEMA_VERSION = 78  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema

 This should be incremented whenever the codebase changes its requirements on the
@@ -106,9 +106,6 @@ Changes in SCHEMA_VERSION = 77
 Changes in SCHEMA_VERSION = 78
     - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters
-
-Changes in SCHEMA_VERSION = 79
-    - We no longer write to column user_id of tables profiles and user_filters
 """
@@ -121,9 +118,7 @@ SCHEMA_COMPAT_VERSION = (
     #
     # insertions to the column `full_user_id` of tables profiles and user_filters can no
     # longer be null
-    #
-    # we no longer write to column `user_id` of tables profiles and user_filters
-    78
+    76
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat


@ -1,50 +0,0 @@
from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
"""
Update to drop the NOT NULL constraint on column user_id so that we can cease to
write to it without inserts to other columns triggering the constraint
"""
if isinstance(database_engine, PostgresEngine):
drop_sql = """
ALTER TABLE profiles ALTER COLUMN user_id DROP NOT NULL
"""
cur.execute(drop_sql)
else:
# irritatingly in SQLite we need to rewrite the table to drop the constraint.
cur.execute("DROP TABLE IF EXISTS temp_profiles")
create_sql = """
CREATE TABLE temp_profiles (
full_user_id text NOT NULL,
user_id text,
displayname text,
avatar_url text,
UNIQUE (full_user_id),
UNIQUE (user_id)
)
"""
cur.execute(create_sql)
copy_sql = """
INSERT INTO temp_profiles (
user_id,
displayname,
avatar_url,
full_user_id)
SELECT user_id, displayname, avatar_url, full_user_id FROM profiles
"""
cur.execute(copy_sql)
drop_sql = """
DROP TABLE profiles
"""
cur.execute(drop_sql)
rename_sql = """
ALTER TABLE temp_profiles RENAME to profiles
"""
cur.execute(rename_sql)
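Postgres can relax NOT NULL in place, but SQLite has no `ALTER COLUMN`, hence the create-copy-drop-rename dance this deleted delta performed. A self-contained illustration of the same pattern (illustrative only, not Synapse code):

import sqlite3

# In-memory demo of the SQLite table-rewrite idiom used above.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE profiles (user_id text NOT NULL, displayname text)")
conn.execute("INSERT INTO profiles VALUES ('alice', 'Alice')")

# Rewrite the table to drop NOT NULL on user_id.
conn.execute("CREATE TABLE temp_profiles (user_id text, displayname text)")
conn.execute("INSERT INTO temp_profiles SELECT user_id, displayname FROM profiles")
conn.execute("DROP TABLE profiles")
conn.execute("ALTER TABLE temp_profiles RENAME TO profiles")

# user_id may now be NULL.
conn.execute("INSERT INTO profiles (displayname) VALUES ('Bob')")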


@ -1,54 +0,0 @@
from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
"""
Update to drop the NOT NULL constraint on column user_id so that we can cease to
write to it without inserts to other columns triggering the constraint
"""
if isinstance(database_engine, PostgresEngine):
drop_sql = """
ALTER TABLE user_filters ALTER COLUMN user_id DROP NOT NULL
"""
cur.execute(drop_sql)
else:
# irritatingly in SQLite we need to rewrite the table to drop the constraint.
cur.execute("DROP TABLE IF EXISTS temp_user_filters")
create_sql = """
CREATE TABLE temp_user_filters (
full_user_id text NOT NULL,
user_id text,
filter_id bigint NOT NULL,
filter_json bytea NOT NULL
)
"""
cur.execute(create_sql)
index_sql = """
CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON
temp_user_filters (full_user_id, filter_id)
"""
cur.execute(index_sql)
copy_sql = """
INSERT INTO temp_user_filters (
user_id,
filter_id,
filter_json,
full_user_id)
SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters
"""
cur.execute(copy_sql)
drop_sql = """
DROP TABLE user_filters
"""
cur.execute(drop_sql)
rename_sql = """
ALTER TABLE temp_user_filters RENAME to user_filters
"""
cur.execute(rename_sql)


@@ -15,6 +15,8 @@
 from twisted.test.proto_helpers import MemoryReactor

 from synapse.server import HomeServer
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import PostgresEngine
 from synapse.types import UserID
 from synapse.util import Clock
@@ -62,3 +64,64 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase):
         self.assertIsNone(
             self.get_success(self.store.get_profile_avatar_url(self.u_frank))
         )
+
+    def test_profiles_bg_migration(self) -> None:
+        """
+        Test background job that copies entries from column user_id to full_user_id, adding
+        the hostname in the process.
+        """
+        updater = self.hs.get_datastores().main.db_pool.updates
+
+        # drop the constraint so we can insert nulls in full_user_id to populate the test
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE profiles DROP CONSTRAINT full_user_id_not_null"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        for i in range(0, 70):
+            self.get_success(
+                self.store.db_pool.simple_insert(
+                    "profiles",
+                    {"user_id": f"hello{i:02}"},
+                )
+            )
+
+        # re-add the constraint so that when it's validated it actually exists
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        self.get_success(
+            self.store.db_pool.simple_insert(
+                "background_updates",
+                values={
+                    "update_name": "populate_full_user_id_profiles",
+                    "progress_json": "{}",
+                },
+            )
+        )
+
+        self.get_success(
+            updater.run_background_updates(False),
+        )
+
+        expected_values = []
+        for i in range(0, 70):
+            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
+
+        res = self.get_success(
+            self.store.db_pool.execute(
+                "", None, "SELECT full_user_id from profiles ORDER BY full_user_id"
+            )
+        )
+        self.assertEqual(len(res), len(expected_values))
+        self.assertEqual(res, expected_values)
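This test re-registers the `populate_full_user_id_profiles` background update by inserting a row into the `background_updates` table, then asserts every profile row gained a full ID of the form `@localpart:hostname`. The update's implementation is outside this diff; as a hedged guess, its core is a batched statement along these lines:

# Hedged guess at the heart of populate_full_user_id_profiles (the real
# implementation batches over rows and lives elsewhere in Synapse):
POPULATE_SQL = """
UPDATE profiles
SET full_user_id = '@' || user_id || ':' || ?
WHERE full_user_id IS NULL
"""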


@@ -0,0 +1,94 @@
+# Copyright 2023 The Matrix.org Foundation C.I.C
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.test.proto_helpers import MemoryReactor
+
+from synapse.server import HomeServer
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import PostgresEngine
+from synapse.util import Clock
+
+from tests import unittest
+
+
+class UserFiltersStoreTestCase(unittest.HomeserverTestCase):
+    """
+    Test background migration that copies entries from column user_id to full_user_id, adding
+    the hostname in the process.
+    """
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+
+    def test_bg_migration(self) -> None:
+        updater = self.hs.get_datastores().main.db_pool.updates
+
+        # drop the constraint so we can insert nulls in full_user_id to populate the test
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE user_filters DROP CONSTRAINT full_user_id_not_null"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        for i in range(0, 70):
+            self.get_success(
+                self.store.db_pool.simple_insert(
+                    "user_filters",
+                    {
+                        "user_id": f"hello{i:02}",
+                        "filter_id": i,
+                        "filter_json": bytearray(i),
+                    },
+                )
+            )
+
+        # re-add the constraint so that when it's validated it actually exists
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE user_filters ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        self.get_success(
+            self.store.db_pool.simple_insert(
+                "background_updates",
+                values={
+                    "update_name": "populate_full_user_id_user_filters",
+                    "progress_json": "{}",
+                },
+            )
+        )
+
+        self.get_success(
+            updater.run_background_updates(False),
+        )
+
+        expected_values = []
+        for i in range(0, 70):
+            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
+
+        res = self.get_success(
+            self.store.db_pool.execute(
+                "", None, "SELECT full_user_id from user_filters ORDER BY full_user_id"
+            )
+        )
+        self.assertEqual(len(res), len(expected_values))
+        self.assertEqual(res, expected_values)
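For reference, `bytearray(i)` above creates an i-byte zero-filled blob, giving each seeded row a distinct `filter_json`, and the final assertions compare against tuples shaped like this (the hostname "test" is the usual HomeserverTestCase default, assumed here rather than taken from the diff):

# Illustration of the expected result shape:
hostname = "test"
expected = [(f"@hello{i:02}:{hostname}",) for i in range(70)]
assert expected[0] == ("@hello00:test",)
assert len(expected) == 70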