Remove support for aggregating reactions (#15172)
It turns out that no clients rely on server-side aggregation of `m.annotation` relationships: it's just not very useful as currently implemented. It's also non-trivial to calculate. I want to remove it from MSC2677, so to keep the implementation in line, let's remove it here.
parent b2fd03d075
commit 2b78981736
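The bundle being removed grouped `m.annotation` relations under `unsigned` → `m.relations` as a `chunk` of `{type, key, count}` entries (see the deleted tests below). For illustration only, here is a minimal sketch of how a client could derive the same counts itself from events paginated via the `/relations` endpoint; the helper name and the assumption that the input is already deduplicated per sender are illustrative, not part of this change.

from collections import Counter
from typing import Iterable, List, Tuple

def count_annotations(annotation_events: Iterable[dict]) -> List[dict]:
    """Group m.annotation events by (event type, aggregation key) and count them.

    `annotation_events` is assumed to be the list of events a client has fetched
    from /_matrix/client/v1/rooms/{roomId}/relations/{eventId}/m.annotation,
    keeping at most one annotation per (sender, type, key).
    """
    counts: Counter = Counter()
    for event in annotation_events:
        # The aggregation key (e.g. the reaction emoji) lives in m.relates_to.
        key = event.get("content", {}).get("m.relates_to", {}).get("key")
        if key is not None:
            counts[(event["type"], key)] += 1

    # Same shape the removed server-side aggregation produced, sorted by count:
    # [{"type": "m.reaction", "key": "👍", "count": 2}, ...]
    return [
        {"type": event_type, "key": key, "count": count}
        for (event_type, key), count in counts.most_common()
    ]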
@@ -0,0 +1 @@
+Remove support for server-side aggregation of reactions.
@@ -516,11 +516,6 @@ class EventClientSerializer:
         # being serialized.
         serialized_aggregations = {}

-        if event_aggregations.annotations:
-            serialized_aggregations[
-                RelationTypes.ANNOTATION
-            ] = event_aggregations.annotations
-
         if event_aggregations.references:
             serialized_aggregations[
                 RelationTypes.REFERENCE
@@ -60,13 +60,12 @@ class BundledAggregations:
     Some values require additional processing during serialization.
     """

-    annotations: Optional[JsonDict] = None
     references: Optional[JsonDict] = None
     replace: Optional[EventBase] = None
     thread: Optional[_ThreadAggregation] = None

     def __bool__(self) -> bool:
-        return bool(self.annotations or self.references or self.replace or self.thread)
+        return bool(self.references or self.replace or self.thread)


 class RelationsHandler:
@@ -227,67 +226,6 @@ class RelationsHandler:
                     e.msg,
                 )

-    async def get_annotations_for_events(
-        self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
-    ) -> Dict[str, List[JsonDict]]:
-        """Get a list of annotations to the given events, grouped by event type and
-        aggregation key, sorted by count.
-
-        This is used e.g. to get the what and how many reactions have happened
-        on an event.
-
-        Args:
-            event_ids: Fetch events that relate to these event IDs.
-            ignored_users: The users ignored by the requesting user.
-
-        Returns:
-            A map of event IDs to a list of groups of annotations that match.
-            Each entry is a dict with `type`, `key` and `count` fields.
-        """
-        # Get the base results for all users.
-        full_results = await self._main_store.get_aggregation_groups_for_events(
-            event_ids
-        )
-
-        # Avoid additional logic if there are no ignored users.
-        if not ignored_users:
-            return {
-                event_id: results
-                for event_id, results in full_results.items()
-                if results
-            }
-
-        # Then subtract off the results for any ignored users.
-        ignored_results = await self._main_store.get_aggregation_groups_for_users(
-            [event_id for event_id, results in full_results.items() if results],
-            ignored_users,
-        )
-
-        filtered_results = {}
-        for event_id, results in full_results.items():
-            # If no annotations, skip.
-            if not results:
-                continue
-
-            # If there are not ignored results for this event, copy verbatim.
-            if event_id not in ignored_results:
-                filtered_results[event_id] = results
-                continue
-
-            # Otherwise, subtract out the ignored results.
-            event_ignored_results = ignored_results[event_id]
-            for result in results:
-                key = (result["type"], result["key"])
-                if key in event_ignored_results:
-                    # Ensure to not modify the cache.
-                    result = result.copy()
-                    result["count"] -= event_ignored_results[key]
-                    if result["count"] <= 0:
-                        continue
-                filtered_results.setdefault(event_id, []).append(result)
-
-        return filtered_results
-
     async def get_references_for_events(
         self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
     ) -> Dict[str, List[_RelatedEvent]]:
@@ -531,17 +469,6 @@ class RelationsHandler:
                 # (as that is what makes it part of the thread).
                 relations_by_id[latest_thread_event.event_id] = RelationTypes.THREAD

-        async def _fetch_annotations() -> None:
-            """Fetch any annotations (ie, reactions) to bundle with this event."""
-            annotations_by_event_id = await self.get_annotations_for_events(
-                events_by_id.keys(), ignored_users=ignored_users
-            )
-            for event_id, annotations in annotations_by_event_id.items():
-                if annotations:
-                    results.setdefault(event_id, BundledAggregations()).annotations = {
-                        "chunk": annotations
-                    }
-
         async def _fetch_references() -> None:
             """Fetch any references to bundle with this event."""
             references_by_event_id = await self.get_references_for_events(
@@ -575,7 +502,6 @@ class RelationsHandler:
         await make_deferred_yieldable(
             gather_results(
                 (
-                    run_in_background(_fetch_annotations),
                     run_in_background(_fetch_references),
                     run_in_background(_fetch_edits),
                 )
@@ -266,9 +266,6 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
         if relates_to:
             self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,))
             self._attempt_to_invalidate_cache("get_references_for_event", (relates_to,))
-            self._attempt_to_invalidate_cache(
-                "get_aggregation_groups_for_event", (relates_to,)
-            )
             self._attempt_to_invalidate_cache("get_applicable_edit", (relates_to,))
             self._attempt_to_invalidate_cache("get_thread_summary", (relates_to,))
             self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,))
@@ -2024,10 +2024,6 @@ class PersistEventsStore:
             self.store._invalidate_cache_and_stream(
                 txn, self.store.get_relations_for_event, (redacted_relates_to,)
             )
-            if rel_type == RelationTypes.ANNOTATION:
-                self.store._invalidate_cache_and_stream(
-                    txn, self.store.get_aggregation_groups_for_event, (redacted_relates_to,)
-                )
             if rel_type == RelationTypes.REFERENCE:
                 self.store._invalidate_cache_and_stream(
                     txn, self.store.get_references_for_event, (redacted_relates_to,)
@@ -1219,9 +1219,6 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
             self._invalidate_cache_and_stream(  # type: ignore[attr-defined]
                 txn, self.get_relations_for_event, cache_tuple  # type: ignore[attr-defined]
             )
-            self._invalidate_cache_and_stream(  # type: ignore[attr-defined]
-                txn, self.get_aggregation_groups_for_event, cache_tuple  # type: ignore[attr-defined]
-            )
             self._invalidate_cache_and_stream(  # type: ignore[attr-defined]
                 txn, self.get_thread_summary, cache_tuple  # type: ignore[attr-defined]
             )
@@ -397,143 +397,6 @@ class RelationsWorkerStore(SQLBaseStore):
         )
         return result is not None

-    @cached()
-    async def get_aggregation_groups_for_event(
-        self, event_id: str
-    ) -> Sequence[JsonDict]:
-        raise NotImplementedError()
-
-    @cachedList(
-        cached_method_name="get_aggregation_groups_for_event", list_name="event_ids"
-    )
-    async def get_aggregation_groups_for_events(
-        self, event_ids: Collection[str]
-    ) -> Mapping[str, Optional[List[JsonDict]]]:
-        """Get a list of annotations on the given events, grouped by event type and
-        aggregation key, sorted by count.
-
-        This is used e.g. to get the what and how many reactions have happend
-        on an event.
-
-        Args:
-            event_ids: Fetch events that relate to these event IDs.
-
-        Returns:
-            A map of event IDs to a list of groups of annotations that match.
-            Each entry is a dict with `type`, `key` and `count` fields.
-        """
-        # The number of entries to return per event ID.
-        limit = 5
-
-        clause, args = make_in_list_sql_clause(
-            self.database_engine, "relates_to_id", event_ids
-        )
-        args.append(RelationTypes.ANNOTATION)
-
-        sql = f"""
-            SELECT
-                relates_to_id,
-                annotation.type,
-                aggregation_key,
-                COUNT(DISTINCT annotation.sender)
-            FROM events AS annotation
-            INNER JOIN event_relations USING (event_id)
-            INNER JOIN events AS parent ON
-                parent.event_id = relates_to_id
-                AND parent.room_id = annotation.room_id
-            WHERE
-                {clause}
-                AND relation_type = ?
-            GROUP BY relates_to_id, annotation.type, aggregation_key
-            ORDER BY relates_to_id, COUNT(*) DESC
-        """
-
-        def _get_aggregation_groups_for_events_txn(
-            txn: LoggingTransaction,
-        ) -> Mapping[str, List[JsonDict]]:
-            txn.execute(sql, args)
-
-            result: Dict[str, List[JsonDict]] = {}
-            for event_id, type, key, count in cast(
-                List[Tuple[str, str, str, int]], txn
-            ):
-                event_results = result.setdefault(event_id, [])
-
-                # Limit the number of results per event ID.
-                if len(event_results) == limit:
-                    continue
-
-                event_results.append({"type": type, "key": key, "count": count})
-
-            return result
-
-        return await self.db_pool.runInteraction(
-            "get_aggregation_groups_for_events", _get_aggregation_groups_for_events_txn
-        )
-
-    async def get_aggregation_groups_for_users(
-        self, event_ids: Collection[str], users: FrozenSet[str]
-    ) -> Dict[str, Dict[Tuple[str, str], int]]:
-        """Fetch the partial aggregations for an event for specific users.
-
-        This is used, in conjunction with get_aggregation_groups_for_event, to
-        remove information from the results for ignored users.
-
-        Args:
-            event_ids: Fetch events that relate to these event IDs.
-            users: The users to fetch information for.
-
-        Returns:
-            A map of event ID to a map of (event type, aggregation key) to a
-            count of users.
-        """
-
-        if not users:
-            return {}
-
-        events_sql, args = make_in_list_sql_clause(
-            self.database_engine, "relates_to_id", event_ids
-        )
-
-        users_sql, users_args = make_in_list_sql_clause(
-            self.database_engine, "annotation.sender", users
-        )
-        args.extend(users_args)
-        args.append(RelationTypes.ANNOTATION)
-
-        sql = f"""
-            SELECT
-                relates_to_id,
-                annotation.type,
-                aggregation_key,
-                COUNT(DISTINCT annotation.sender)
-            FROM events AS annotation
-            INNER JOIN event_relations USING (event_id)
-            INNER JOIN events AS parent ON
-                parent.event_id = relates_to_id
-                AND parent.room_id = annotation.room_id
-            WHERE {events_sql} AND {users_sql} AND relation_type = ?
-            GROUP BY relates_to_id, annotation.type, aggregation_key
-            ORDER BY relates_to_id, COUNT(*) DESC
-        """
-
-        def _get_aggregation_groups_for_users_txn(
-            txn: LoggingTransaction,
-        ) -> Dict[str, Dict[Tuple[str, str], int]]:
-            txn.execute(sql, args)
-
-            result: Dict[str, Dict[Tuple[str, str], int]] = {}
-            for event_id, type, key, count in cast(
-                List[Tuple[str, str, str, int]], txn
-            ):
-                result.setdefault(event_id, {})[(type, key)] = count
-
-            return result
-
-        return await self.db_pool.runInteraction(
-            "get_aggregation_groups_for_users", _get_aggregation_groups_for_users_txn
-        )
-
     @cached()
     async def get_references_for_event(self, event_id: str) -> List[JsonDict]:
         raise NotImplementedError()
@@ -1080,48 +1080,6 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         ]
         assert_bundle(self._find_event_in_chunk(chunk))

-    def test_annotation(self) -> None:
-        """
-        Test that annotations get correctly bundled.
-        """
-        # Setup by sending a variety of relations.
-        self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
-        self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
-        )
-        self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "b")
-
-        def assert_annotations(bundled_aggregations: JsonDict) -> None:
-            self.assertEqual(
-                {
-                    "chunk": [
-                        {"type": "m.reaction", "key": "a", "count": 2},
-                        {"type": "m.reaction", "key": "b", "count": 1},
-                    ]
-                },
-                bundled_aggregations,
-            )
-
-        self._test_bundled_aggregations(RelationTypes.ANNOTATION, assert_annotations, 7)
-
-    def test_annotation_to_annotation(self) -> None:
-        """Any relation to an annotation should be ignored."""
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
-        event_id = channel.json_body["event_id"]
-        self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "b", parent_id=event_id
-        )
-
-        # Fetch the initial annotation event to see if it has bundled aggregations.
-        channel = self.make_request(
-            "GET",
-            f"/_matrix/client/v3/rooms/{self.room}/event/{event_id}",
-            access_token=self.user_token,
-        )
-        self.assertEquals(200, channel.code, channel.json_body)
-        # The first annotationt should not have any bundled aggregations.
-        self.assertNotIn("m.relations", channel.json_body["unsigned"])
-
     def test_reference(self) -> None:
         """
         Test that references get correctly bundled.
@@ -1138,7 +1096,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
                 bundled_aggregations,
             )

-        self._test_bundled_aggregations(RelationTypes.REFERENCE, assert_annotations, 7)
+        self._test_bundled_aggregations(RelationTypes.REFERENCE, assert_annotations, 6)

     def test_thread(self) -> None:
         """
@@ -1183,7 +1141,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):

         # The "user" sent the root event and is making queries for the bundled
         # aggregations: they have participated.
-        self._test_bundled_aggregations(RelationTypes.THREAD, _gen_assert(True), 7)
+        self._test_bundled_aggregations(RelationTypes.THREAD, _gen_assert(True), 6)
         # The "user2" sent replies in the thread and is making queries for the
         # bundled aggregations: they have participated.
         #
@@ -1208,9 +1166,10 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         channel = self._send_relation(RelationTypes.THREAD, "m.room.test")
         thread_2 = channel.json_body["event_id"]

-        self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "a", parent_id=thread_2
+        channel = self._send_relation(
+            RelationTypes.REFERENCE, "org.matrix.test", parent_id=thread_2
         )
+        reference_event_id = channel.json_body["event_id"]

         def assert_thread(bundled_aggregations: JsonDict) -> None:
             self.assertEqual(2, bundled_aggregations.get("count"))
@@ -1235,17 +1194,15 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
             self.assert_dict(
                 {
                     "m.relations": {
-                        RelationTypes.ANNOTATION: {
-                            "chunk": [
-                                {"type": "m.reaction", "key": "a", "count": 1},
-                            ]
+                        RelationTypes.REFERENCE: {
+                            "chunk": [{"event_id": reference_event_id}]
                         },
                     }
                 },
                 bundled_aggregations["latest_event"].get("unsigned"),
             )

-        self._test_bundled_aggregations(RelationTypes.THREAD, assert_thread, 7)
+        self._test_bundled_aggregations(RelationTypes.THREAD, assert_thread, 6)

     def test_nested_thread(self) -> None:
         """
@@ -1363,10 +1320,11 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         channel = self._send_relation(RelationTypes.THREAD, "m.room.test")
         thread_id = channel.json_body["event_id"]

-        # Annotate the thread.
-        self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "a", parent_id=thread_id
+        # Make a reference to the thread.
+        channel = self._send_relation(
+            RelationTypes.REFERENCE, "org.matrix.test", parent_id=thread_id
         )
+        reference_event_id = channel.json_body["event_id"]

         channel = self.make_request(
             "GET",
@@ -1377,9 +1335,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         self.assertEqual(
             channel.json_body["unsigned"].get("m.relations"),
             {
-                RelationTypes.ANNOTATION: {
-                    "chunk": [{"count": 1, "key": "a", "type": "m.reaction"}]
-                },
+                RelationTypes.REFERENCE: {"chunk": [{"event_id": reference_event_id}]},
             },
         )

@@ -1396,9 +1352,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         self.assertEqual(
             thread_message["unsigned"].get("m.relations"),
             {
-                RelationTypes.ANNOTATION: {
-                    "chunk": [{"count": 1, "key": "a", "type": "m.reaction"}]
-                },
+                RelationTypes.REFERENCE: {"chunk": [{"event_id": reference_event_id}]},
             },
         )

@@ -1410,7 +1364,8 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         Note that the spec allows for a server to return additional fields beyond
         what is specified.
         """
-        self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
+        channel = self._send_relation(RelationTypes.REFERENCE, "org.matrix.test")
+        reference_event_id = channel.json_body["event_id"]

         # Note that the sync filter does not include "unsigned" as a field.
         filter = urllib.parse.quote_plus(
@@ -1428,7 +1383,12 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):

         # Ensure there's bundled aggregations on it.
         self.assertIn("unsigned", parent_event)
-        self.assertIn("m.relations", parent_event["unsigned"])
+        self.assertEqual(
+            parent_event["unsigned"].get("m.relations"),
+            {
+                RelationTypes.REFERENCE: {"chunk": [{"event_id": reference_event_id}]},
+            },
+        )


 class RelationIgnoredUserTestCase(BaseRelationsTestCase):
@@ -1475,53 +1435,8 @@ class RelationIgnoredUserTestCase(BaseRelationsTestCase):

         return before_aggregations[relation_type], after_aggregations[relation_type]

-    def test_annotation(self) -> None:
-        """Annotations should ignore"""
-        # Send 2 from us, 2 from the to be ignored user.
-        allowed_event_ids = []
-        ignored_event_ids = []
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="a")
-        allowed_event_ids.append(channel.json_body["event_id"])
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="b")
-        allowed_event_ids.append(channel.json_body["event_id"])
-        channel = self._send_relation(
-            RelationTypes.ANNOTATION,
-            "m.reaction",
-            key="a",
-            access_token=self.user2_token,
-        )
-        ignored_event_ids.append(channel.json_body["event_id"])
-        channel = self._send_relation(
-            RelationTypes.ANNOTATION,
-            "m.reaction",
-            key="c",
-            access_token=self.user2_token,
-        )
-        ignored_event_ids.append(channel.json_body["event_id"])
-
-        before_aggregations, after_aggregations = self._test_ignored_user(
-            RelationTypes.ANNOTATION, allowed_event_ids, ignored_event_ids
-        )
-
-        self.assertCountEqual(
-            before_aggregations["chunk"],
-            [
-                {"type": "m.reaction", "key": "a", "count": 2},
-                {"type": "m.reaction", "key": "b", "count": 1},
-                {"type": "m.reaction", "key": "c", "count": 1},
-            ],
-        )
-
-        self.assertCountEqual(
-            after_aggregations["chunk"],
-            [
-                {"type": "m.reaction", "key": "a", "count": 1},
-                {"type": "m.reaction", "key": "b", "count": 1},
-            ],
-        )
-
     def test_reference(self) -> None:
-        """Annotations should ignore"""
+        """Aggregations should exclude reference relations from ignored users"""
         channel = self._send_relation(RelationTypes.REFERENCE, "m.room.test")
         allowed_event_ids = [channel.json_body["event_id"]]

@@ -1544,7 +1459,7 @@ class RelationIgnoredUserTestCase(BaseRelationsTestCase):
         )

     def test_thread(self) -> None:
-        """Annotations should ignore"""
+        """Aggregations should exclude thread releations from ignored users"""
         channel = self._send_relation(RelationTypes.THREAD, "m.room.test")
         allowed_event_ids = [channel.json_body["event_id"]]

@@ -1618,43 +1533,6 @@ class RelationRedactionTestCase(BaseRelationsTestCase):
             for t in threads
         ]

-    def test_redact_relation_annotation(self) -> None:
-        """
-        Test that annotations of an event are properly handled after the
-        annotation is redacted.
-
-        The redacted relation should not be included in bundled aggregations or
-        the response to relations.
-        """
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
-        to_redact_event_id = channel.json_body["event_id"]
-
-        channel = self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
-        )
-        unredacted_event_id = channel.json_body["event_id"]
-
-        # Both relations should exist.
-        event_ids = self._get_related_events()
-        relations = self._get_bundled_aggregations()
-        self.assertCountEqual(event_ids, [to_redact_event_id, unredacted_event_id])
-        self.assertEquals(
-            relations["m.annotation"],
-            {"chunk": [{"type": "m.reaction", "key": "a", "count": 2}]},
-        )
-
-        # Redact one of the reactions.
-        self._redact(to_redact_event_id)
-
-        # The unredacted relation should still exist.
-        event_ids = self._get_related_events()
-        relations = self._get_bundled_aggregations()
-        self.assertEquals(event_ids, [unredacted_event_id])
-        self.assertEquals(
-            relations["m.annotation"],
-            {"chunk": [{"type": "m.reaction", "key": "a", "count": 1}]},
-        )
-
     def test_redact_relation_thread(self) -> None:
         """
         Test that thread replies are properly handled after the thread reply redacted.
@@ -1775,14 +1653,14 @@ class RelationRedactionTestCase(BaseRelationsTestCase):
         is redacted.
         """
         # Add a relation
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
+        channel = self._send_relation(RelationTypes.REFERENCE, "org.matrix.test")
         related_event_id = channel.json_body["event_id"]

         # The relations should exist.
         event_ids = self._get_related_events()
         relations = self._get_bundled_aggregations()
         self.assertEqual(len(event_ids), 1)
-        self.assertIn(RelationTypes.ANNOTATION, relations)
+        self.assertIn(RelationTypes.REFERENCE, relations)

         # Redact the original event.
         self._redact(self.parent_id)
@@ -1792,8 +1670,8 @@ class RelationRedactionTestCase(BaseRelationsTestCase):
         relations = self._get_bundled_aggregations()
         self.assertEquals(event_ids, [related_event_id])
         self.assertEquals(
-            relations["m.annotation"],
-            {"chunk": [{"type": "m.reaction", "key": "👍", "count": 1}]},
+            relations[RelationTypes.REFERENCE],
+            {"chunk": [{"event_id": related_event_id}]},
         )

     def test_redact_parent_thread(self) -> None: