commit 8e8431bc6e
Merge branch 'master' into develop

--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,4 +1,4 @@
-# Synapse 1.88.0rc1 (2023-07-11)
+# Synapse 1.88.0 (2023-07-18)
 
 This release
 - raises the minimum supported version of Python to 3.8, as Python 3.7 is now [end-of-life](https://devguide.python.org/versions/), and
@@ -6,6 +6,14 @@ This release
 
 See [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88/docs/upgrade.md#upgrading-to-v1880) for more information.
 
+### Bugfixes
+
+- Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters`", which was introduced in Synapse 1.88.0rc1. ([\#15953](https://github.com/matrix-org/synapse/issues/15953))
+
+
+# Synapse 1.88.0rc1 (2023-07-11)
+
+
 ### Features
 
 - Add `not_user_type` param to the [list accounts admin API](https://matrix-org.github.io/synapse/v1.88/admin_api/user_admin_api.html#list-accounts). ([\#15844](https://github.com/matrix-org/synapse/issues/15844))

--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.88.0) stable; urgency=medium
+
+  * New Synapse release 1.88.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jul 2023 13:59:28 +0100
+
 matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
 
   * New Synapse release 1.88.0rc1.
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.88.0rc1"
+version = "1.88.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
--- a/synapse/_scripts/synapse_port_db
+++ b/synapse/_scripts/synapse_port_db
@@ -98,8 +98,6 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
     "event_push_summary": "event_push_summary_unique_index2",
     "receipts_linearized": "receipts_linearized_unique_index",
     "receipts_graph": "receipts_graph_unique_index",
-    "profiles": "profiles_full_user_id_key_idx",
-    "user_filters": "full_users_filters_unique_idx",
 }
 
 
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -15,7 +15,7 @@
 # limitations under the License.
 
 import logging
-from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast
+from typing import TYPE_CHECKING, List, Optional, Tuple, cast
 
 from synapse.api.constants import Direction
 from synapse.config.homeserver import HomeServerConfig
@@ -196,7 +196,7 @@ class DataStore(
             txn: LoggingTransaction,
         ) -> Tuple[List[JsonDict], int]:
             filters = []
-            args: List[Union[str, int]] = []
+            args = [self.hs.config.server.server_name]
 
             # Set ordering
             order_by_column = UserSortOrder(order_by).value
@@ -263,7 +263,7 @@ class DataStore(
 
         sql_base = f"""
                 FROM users as u
-                LEFT JOIN profiles AS p ON u.name = p.full_user_id
+                LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
                 LEFT JOIN erased_users AS eu ON u.name = eu.user_id
                 {where_clause}
                 """
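The restored JOIN above rebuilds each full Matrix user ID on the fly: `'@' || p.user_id || ':' || ?` concatenates the `@` sigil, the localpart stored in `profiles.user_id`, and the local server name (the parameter that `args` is now seeded with). A minimal runnable sketch of the same concatenation, using the stdlib `sqlite3` module and a toy two-table schema rather than Synapse's real one:

```python
# Toy demo (not Synapse code): rebuild "@localpart:server" in SQL, as the
# restored LEFT JOIN does, with the server name bound as a parameter.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE users (name TEXT);
    CREATE TABLE profiles (user_id TEXT, displayname TEXT);
    INSERT INTO users VALUES ('@alice:example.com');
    INSERT INTO profiles VALUES ('alice', 'Alice');
    """
)
row = conn.execute(
    "SELECT u.name, p.displayname FROM users AS u "
    "LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?",
    ("example.com",),
).fetchone()
print(row)  # -> ('@alice:example.com', 'Alice')
```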
--- a/synapse/storage/databases/main/filtering.py
+++ b/synapse/storage/databases/main/filtering.py
@@ -188,13 +188,14 @@ class FilteringWorkerStore(SQLBaseStore):
             filter_id = max_id + 1
 
             sql = (
-                "INSERT INTO user_filters (full_user_id, filter_id, filter_json)"
-                "VALUES(?, ?, ?)"
+                "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)"
+                "VALUES(?, ?, ?, ?)"
             )
             txn.execute(
                 sql,
                 (
                     user_id.to_string(),
+                    user_id.localpart,
                     filter_id,
                     bytearray(def_json),
                 ),
--- a/synapse/storage/databases/main/profile.py
+++ b/synapse/storage/databases/main/profile.py
@@ -173,9 +173,10 @@ class ProfileWorkerStore(SQLBaseStore):
         )
 
     async def create_profile(self, user_id: UserID) -> None:
+        user_localpart = user_id.localpart
         await self.db_pool.simple_insert(
             table="profiles",
-            values={"full_user_id": user_id.to_string()},
+            values={"user_id": user_localpart, "full_user_id": user_id.to_string()},
             desc="create_profile",
         )
 
@@ -190,11 +191,13 @@ class ProfileWorkerStore(SQLBaseStore):
             new_displayname: The new display name. If this is None, the user's display
                 name is removed.
         """
+        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"full_user_id": user_id.to_string()},
+            keyvalues={"user_id": user_localpart},
             values={
                 "displayname": new_displayname,
+                "full_user_id": user_id.to_string(),
             },
             desc="set_profile_displayname",
         )
@@ -210,10 +213,11 @@ class ProfileWorkerStore(SQLBaseStore):
             new_avatar_url: The new avatar URL. If this is None, the user's avatar is
                 removed.
         """
+        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"full_user_id": user_id.to_string()},
-            values={"avatar_url": new_avatar_url},
+            keyvalues={"user_id": user_localpart},
+            values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()},
             desc="set_profile_avatar_url",
         )
 
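Taken together, the `filtering.py` and `profile.py` changes above restore a dual-write scheme: every insert or upsert populates both `user_id` (the bare localpart) and `full_user_id` (the complete Matrix ID), so readers of either column stay consistent while the column migration is staged across releases. A sketch of the invariant between the two columns, assuming plain strings instead of Synapse's `UserID` type:

```python
# Sketch only, assuming plain strings rather than Synapse's UserID type.
def profile_row(full_user_id: str) -> dict:
    # "@alice:example.com" -> localpart "alice"
    localpart = full_user_id[1:].split(":", 1)[0]
    # Write both columns so old readers (user_id) and new readers
    # (full_user_id) see consistent data during the migration.
    return {"user_id": localpart, "full_user_id": full_user_id}

assert profile_row("@alice:example.com") == {
    "user_id": "alice",
    "full_user_id": "@alice:example.com",
}
```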
--- a/synapse/storage/schema/__init__.py
+++ b/synapse/storage/schema/__init__.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-SCHEMA_VERSION = 79  # remember to update the list below when updating
+SCHEMA_VERSION = 78  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema
 
 This should be incremented whenever the codebase changes its requirements on the
@@ -106,9 +106,6 @@ Changes in SCHEMA_VERSION = 77
 
 Changes in SCHEMA_VERSION = 78
     - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters
-
-Changes in SCHEMA_VERSION = 79
-    - We no longer write to column user_id of tables profiles and user_filters
 """
 
 
@@ -121,9 +118,7 @@ SCHEMA_COMPAT_VERSION = (
     #
     # insertions to the column `full_user_id` of tables profiles and user_filters can no
     # longer be null
-    #
-    # we no longer write to column `full_user_id` of tables profiles and user_filters
-    78
+    76
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat
 
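Dropping `SCHEMA_VERSION` back to 78 and `SCHEMA_COMPAT_VERSION` back to 76 widens the rollback window again. Per the docstrings above, a database remains usable by any older Synapse whose own `SCHEMA_VERSION` is at least the database's `SCHEMA_COMPAT_VERSION`; a sketch of that rule as I read it (illustrative only, not Synapse code):

```python
def can_downgrade_to(codebase_schema_version: int, db_compat_version: int) -> bool:
    # An older codebase can run against this database only if its own
    # SCHEMA_VERSION has not fallen below the database's compat floor.
    return codebase_schema_version >= db_compat_version

assert can_downgrade_to(77, 76)      # permitted again after this revert
assert not can_downgrade_to(77, 78)  # would have been refused before it
```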
--- a/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from synapse.storage.database import LoggingTransaction
-from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
-
-
-def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
-    """
-    Update to drop the NOT NULL constraint on column user_id so that we can cease to
-    write to it without inserts to other columns triggering the constraint
-    """
-
-    if isinstance(database_engine, PostgresEngine):
-        drop_sql = """
-        ALTER TABLE profiles ALTER COLUMN user_id DROP NOT NULL
-        """
-        cur.execute(drop_sql)
-    else:
-        # irritatingly in SQLite we need to rewrite the table to drop the constraint.
-        cur.execute("DROP TABLE IF EXISTS temp_profiles")
-
-        create_sql = """
-        CREATE TABLE temp_profiles (
-            full_user_id text NOT NULL,
-            user_id text,
-            displayname text,
-            avatar_url text,
-            UNIQUE (full_user_id),
-            UNIQUE (user_id)
-        )
-        """
-        cur.execute(create_sql)
-
-        copy_sql = """
-        INSERT INTO temp_profiles (
-            user_id,
-            displayname,
-            avatar_url,
-            full_user_id)
-            SELECT user_id, displayname, avatar_url, full_user_id FROM profiles
-        """
-        cur.execute(copy_sql)
-
-        drop_sql = """
-        DROP TABLE profiles
-        """
-        cur.execute(drop_sql)
-
-        rename_sql = """
-        ALTER TABLE temp_profiles RENAME to profiles
-        """
-        cur.execute(rename_sql)
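The deleted delta shows why its comment calls SQLite "irritating": SQLite has no `ALTER TABLE ... ALTER COLUMN ... DROP NOT NULL`, so shedding a constraint means rebuilding the whole table. A runnable miniature of the same create/copy/drop/rename dance, on a toy schema via the stdlib `sqlite3` module:

```python
# Toy demo of the table-rewrite trick the deleted delta used for SQLite.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE profiles (user_id TEXT NOT NULL, displayname TEXT)")
conn.execute("INSERT INTO profiles VALUES ('alice', 'Alice')")

conn.executescript(
    """
    -- 1. new table without the NOT NULL constraint
    CREATE TABLE temp_profiles (user_id TEXT, displayname TEXT);
    -- 2. copy the data across
    INSERT INTO temp_profiles SELECT user_id, displayname FROM profiles;
    -- 3. drop the old table, 4. rename the new one into place
    DROP TABLE profiles;
    ALTER TABLE temp_profiles RENAME TO profiles;
    """
)
conn.execute("INSERT INTO profiles (displayname) VALUES ('orphan')")  # NULL user_id now legal
print(conn.execute("SELECT * FROM profiles").fetchall())
# -> [('alice', 'Alice'), (None, 'orphan')]
```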
--- a/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from synapse.storage.database import LoggingTransaction
-from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
-
-
-def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
-    """
-    Update to drop the NOT NULL constraint on column user_id so that we can cease to
-    write to it without inserts to other columns triggering the constraint
-    """
-    if isinstance(database_engine, PostgresEngine):
-        drop_sql = """
-        ALTER TABLE user_filters ALTER COLUMN user_id DROP NOT NULL
-        """
-        cur.execute(drop_sql)
-
-    else:
-        # irritatingly in SQLite we need to rewrite the table to drop the constraint.
-        cur.execute("DROP TABLE IF EXISTS temp_user_filters")
-
-        create_sql = """
-        CREATE TABLE temp_user_filters (
-            full_user_id text NOT NULL,
-            user_id text,
-            filter_id bigint NOT NULL,
-            filter_json bytea NOT NULL
-        )
-        """
-        cur.execute(create_sql)
-
-        index_sql = """
-        CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON
-        temp_user_filters (full_user_id, filter_id)
-        """
-        cur.execute(index_sql)
-
-        copy_sql = """
-        INSERT INTO temp_user_filters (
-            user_id,
-            filter_id,
-            filter_json,
-            full_user_id)
-            SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters
-        """
-        cur.execute(copy_sql)
-
-        drop_sql = """
-        DROP TABLE user_filters
-        """
-        cur.execute(drop_sql)
-
-        rename_sql = """
-        ALTER TABLE temp_user_filters RENAME to user_filters
-        """
-        cur.execute(rename_sql)
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -15,6 +15,8 @@
 from twisted.test.proto_helpers import MemoryReactor
 
 from synapse.server import HomeServer
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import PostgresEngine
 from synapse.types import UserID
 from synapse.util import Clock
 
@@ -62,3 +64,64 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase):
         self.assertIsNone(
             self.get_success(self.store.get_profile_avatar_url(self.u_frank))
         )
+
+    def test_profiles_bg_migration(self) -> None:
+        """
+        Test background job that copies entries from column user_id to full_user_id, adding
+        the hostname in the process.
+        """
+        updater = self.hs.get_datastores().main.db_pool.updates
+
+        # drop the constraint so we can insert nulls in full_user_id to populate the test
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE profiles DROP CONSTRAINT full_user_id_not_null"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        for i in range(0, 70):
+            self.get_success(
+                self.store.db_pool.simple_insert(
+                    "profiles",
+                    {"user_id": f"hello{i:02}"},
+                )
+            )
+
+        # re-add the constraint so that when it's validated it actually exists
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        self.get_success(
+            self.store.db_pool.simple_insert(
+                "background_updates",
+                values={
+                    "update_name": "populate_full_user_id_profiles",
+                    "progress_json": "{}",
+                },
+            )
+        )
+
+        self.get_success(
+            updater.run_background_updates(False),
+        )
+
+        expected_values = []
+        for i in range(0, 70):
+            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
+
+        res = self.get_success(
+            self.store.db_pool.execute(
+                "", None, "SELECT full_user_id from profiles ORDER BY full_user_id"
+            )
+        )
+        self.assertEqual(len(res), len(expected_values))
+        self.assertEqual(res, expected_values)
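Both this test and the new `tests/storage/test_user_filters.py` below lean on a Postgres-specific trick: `ADD CONSTRAINT ... CHECK ... NOT VALID` installs a check that is enforced for new writes without scanning existing rows, which is why the test can seed rows whose `full_user_id` is NULL and leave the full-table validation to a later step (the "Validate check (full_user_id IS NOT NULL)" entry for schema version 78 above records exactly that). A sketch of the pattern outside the test harness, assuming a reachable Postgres database (hypothetical DSN) and the third-party `psycopg2` package:

```python
import psycopg2  # third-party driver; any Postgres client works the same way

conn = psycopg2.connect("dbname=synapse")  # hypothetical DSN
with conn, conn.cursor() as cur:
    # Enforced for new rows immediately; existing rows are not scanned yet.
    cur.execute(
        "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null "
        "CHECK (full_user_id IS NOT NULL) NOT VALID"
    )
    # Later, a separate migration step performs the full scan:
    cur.execute("ALTER TABLE profiles VALIDATE CONSTRAINT full_user_id_not_null")
```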
--- /dev/null
+++ b/tests/storage/test_user_filters.py
@@ -0,0 +1,94 @@
+# Copyright 2023 The Matrix.org Foundation C.I.C
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.test.proto_helpers import MemoryReactor
+
+from synapse.server import HomeServer
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import PostgresEngine
+from synapse.util import Clock
+
+from tests import unittest
+
+
+class UserFiltersStoreTestCase(unittest.HomeserverTestCase):
+    """
+    Test background migration that copies entries from column user_id to full_user_id, adding
+    the hostname in the process.
+    """
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+
+    def test_bg_migration(self) -> None:
+        updater = self.hs.get_datastores().main.db_pool.updates
+
+        # drop the constraint so we can insert nulls in full_user_id to populate the test
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE user_filters DROP CONSTRAINT full_user_id_not_null"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        for i in range(0, 70):
+            self.get_success(
+                self.store.db_pool.simple_insert(
+                    "user_filters",
+                    {
+                        "user_id": f"hello{i:02}",
+                        "filter_id": i,
+                        "filter_json": bytearray(i),
+                    },
+                )
+            )
+
+        # re-add the constraint so that when it's validated it actually exists
+        if isinstance(self.store.database_engine, PostgresEngine):
+
+            def f(txn: LoggingTransaction) -> None:
+                txn.execute(
+                    "ALTER TABLE user_filters ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
+                )
+
+            self.get_success(self.store.db_pool.runInteraction("", f))
+
+        self.get_success(
+            self.store.db_pool.simple_insert(
+                "background_updates",
+                values={
+                    "update_name": "populate_full_user_id_user_filters",
+                    "progress_json": "{}",
+                },
+            )
+        )
+
+        self.get_success(
+            updater.run_background_updates(False),
+        )
+
+        expected_values = []
+        for i in range(0, 70):
+            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
+
+        res = self.get_success(
+            self.store.db_pool.execute(
+                "", None, "SELECT full_user_id from user_filters ORDER BY full_user_id"
+            )
+        )
+        self.assertEqual(len(res), len(expected_values))
+        self.assertEqual(res, expected_values)