This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

MSC3840 Ignore rooms #13177

Draft · wants to merge 4 commits into base: develop
1 change: 1 addition & 0 deletions synapse/api/constants.py
@@ -240,6 +240,7 @@ class RoomEncryptionAlgorithms:
class AccountDataTypes:
DIRECT: Final = "m.direct"
IGNORED_USER_LIST: Final = "m.ignored_user_list"
IGNORED_INVITE_LIST: Final = "org.matrix.msc3840.ignored_invites"


class HistoryVisibility:
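For context, the account data referenced by the new `IGNORED_INVITE_LIST` constant is expected to have the shape below. This is a sketch based on the format parsed in `account_data.py` and the schema delta further down in this diff; the room ID and timestamp are illustrative values.

```python
# Illustrative content for an org.matrix.msc3840.ignored_invites account data
# event, matching the shape parsed elsewhere in this PR. The room ID and
# timestamp are made up for the example.
ignored_invites_content = {
    "ignored_rooms": [
        {"room_id": "!iojfoijwefoij:matrix.org", "ts": 19019092},
    ]
}

# Mirroring the defensive parsing used in _add_account_data_for_user: if the
# value is not a list, treat it as if nothing were ignored.
ignored = ignored_invites_content.get("ignored_rooms", [])
currently_ignored_rooms = (
    {room["room_id"] for room in ignored if room.get("room_id")}
    if isinstance(ignored, list)
    else set()
)
assert currently_ignored_rooms == {"!iojfoijwefoij:matrix.org"}
```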
11 changes: 9 additions & 2 deletions synapse/handlers/sync.py
@@ -1534,16 +1534,23 @@ async def _generate_sync_entry_for_rooms(

# 3. Work out which rooms need reporting in the sync response.
ignored_users = await self.store.ignored_users(user_id)
# TODO: Check that _get_rooms_changed and _get_all_rooms are only called from here;
# if they have other callers, those also need self.store.ignored_rooms added to
# their exclusion lists.
ignored_rooms = await self.store.ignored_rooms(user_id)
if since_token:
room_changes = await self._get_rooms_changed(
sync_result_builder, ignored_users, self.rooms_to_exclude
sync_result_builder,
ignored_users,
self.rooms_to_exclude + list(ignored_rooms),
)
tags_by_room = await self.store.get_updated_tags(
user_id, since_token.account_data_key
)
else:
room_changes = await self._get_all_rooms(
sync_result_builder, ignored_users, self.rooms_to_exclude
sync_result_builder,
ignored_users,
self.rooms_to_exclude + list(ignored_rooms),
)
tags_by_room = await self.store.get_tags_for_user(user_id)

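A minimal standalone illustration of the change above (not part of the diff): the rooms excluded from the sync response are now the server-configured exclusions plus the user's ignored rooms. The values below are hypothetical.

```python
# Hypothetical values: rooms_to_exclude normally comes from homeserver
# configuration, ignored_rooms from the new store.ignored_rooms() lookup.
rooms_to_exclude = ["!serverwide:example.org"]
ignored_rooms = frozenset({"!iojfoijwefoij:matrix.org"})

# Both _get_rooms_changed and _get_all_rooms now receive this combined list.
excluded_rooms = rooms_to_exclude + list(ignored_rooms)
print(excluded_rooms)  # ['!serverwide:example.org', '!iojfoijwefoij:matrix.org']
```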
67 changes: 67 additions & 0 deletions synapse/storage/databases/main/account_data.py
@@ -414,6 +414,26 @@ async def ignored_users(self, user_id: str) -> FrozenSet[str]:
)
)

@cached(max_entries=5000, iterable=True)
async def ignored_rooms(self, user_id: str) -> FrozenSet[str]:
"""
Get rooms which the given user has explicitly ignored.
See MSC3840

Params:
user_id: The user ID which is making the request.

Return:
The room IDs which are ignored by the given user."""
return frozenset(
await self.db_pool.simple_select_onecol(
table="ignored_rooms",
keyvalues={"ignorer_user_id": user_id},
retcol="ignored_room_id",
desc="ignored_rooms",
)
)

def process_replication_rows(
self,
stream_name: str,
@@ -538,6 +558,53 @@ def _add_account_data_for_user(
lock=False,
)

if account_data_type == AccountDataTypes.IGNORED_INVITE_LIST:
previously_ignored_rooms = set(
self.db_pool.simple_select_onecol_txn(
txn,
table="ignored_rooms",
keyvalues={"ignorer_user_id": user_id},
retcol="ignored_room_id",
)
)
# If the data is invalid, no rooms are ignored.
ignored_rooms_content = content.get("ignored_rooms", [])
if isinstance(ignored_rooms_content, list):
room_ids = []
for room in ignored_rooms_content:
room_id = room.get("room_id")
if room_id:
room_ids.append(room_id)
currently_ignored_rooms = set(room_ids)
else:
currently_ignored_rooms = set()

# If the data has not changed, nothing to do.
if previously_ignored_rooms == currently_ignored_rooms:
return

# Delete entries which are no longer ignored.
self.db_pool.simple_delete_many_txn(
txn,
table="ignored_rooms",
column="ignored_room_id",
values=previously_ignored_rooms - currently_ignored_rooms,
keyvalues={"ignorer_user_id": user_id},
)

# Add entries which are newly ignored.
self.db_pool.simple_insert_many_txn(
txn,
table="ignored_rooms",
keys=("ignorer_user_id", "ignored_room_id"),
values=[
(user_id, r)
for r in currently_ignored_rooms - previously_ignored_rooms
],
)
# Invalidate cache for the user's ignored rooms.
self._invalidate_cache_and_stream(txn, self.ignored_rooms, (user_id,))

# Ignored users get denormalized into a separate table as an optimisation.
if account_data_type != AccountDataTypes.IGNORED_USER_LIST:
return
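For completeness, a hedged client-side sketch (not part of this PR) of how this code path is exercised: a client stores the list through the standard account-data endpoint, `PUT /_matrix/client/v3/user/{userId}/account_data/{type}`, which ultimately reaches `_add_account_data_for_user` above and denormalises the rooms into `ignored_rooms`. The homeserver URL, user ID and access token below are placeholders.

```python
import requests

HOMESERVER = "https://matrix.example.org"  # placeholder
USER_ID = "@alice:example.org"             # placeholder
ACCESS_TOKEN = "syt_placeholder_token"     # placeholder

resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{USER_ID}/account_data/"
    "org.matrix.msc3840.ignored_invites",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={"ignored_rooms": [{"room_id": "!iojfoijwefoij:matrix.org"}]},
)
resp.raise_for_status()
```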
87 changes: 87 additions & 0 deletions synapse/storage/schema/main/delta/72/01ignored_room.py
@@ -0,0 +1,87 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This migration denormalises the account_data table into an ignored rooms table.
TODO: Don't know where this is supposed to go i'm making it up as i go along, copied from ignored users.
"""

import logging
from io import StringIO

from synapse.storage._base import db_to_json
from synapse.storage.engines import BaseDatabaseEngine
from synapse.storage.prepare_database import execute_statements_from_stream
from synapse.storage.types import Cursor

logger = logging.getLogger(__name__)


def run_upgrade(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs):
pass


def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs):
logger.info("Creating ignored_users table")
execute_statements_from_stream(cur, StringIO(_create_commands))

# We now upgrade existing data, if any. We don't do this in `run_upgrade` as
# we a) want to run these before adding constraints and b) `run_upgrade` is
# not run on empty databases.
insert_sql = """
INSERT INTO ignored_rooms (ignorer_user_id, ignored_room_id) VALUES (?, ?)
"""

logger.info("Converting existing ignore lists")
# TODO: should AccountDataTypes.IGNORED_INVITE_LIST be used here instead of the literal string?
cur.execute(
"SELECT user_id, content FROM account_data WHERE account_data_type = 'org.matrix.msc3840.ignored_invites'"
)
for user_id, content_json in cur.fetchall():
content = db_to_json(content_json)

# The content should be a dictionary with an "ignored_rooms" key pointing to
# a list of dictionaries, each with a "room_id" key, e.g.:
#
# { "ignored_rooms": [{"room_id": "!iojfoijwefoij:matrix.org", "ts": 19019092}] }
ignored_rooms = content.get("ignored_rooms", [])
if isinstance(ignored_rooms, list):
# Collect the room IDs, skipping any entry without a room_id.
room_ids = []
for room in ignored_rooms:
room_id = room.get("room_id")
if room_id:
room_ids.append(room_id)
cur.execute_batch(insert_sql, [(user_id, r) for r in room_ids])

# Add indexes after inserting data for efficiency.
logger.info("Adding constraints to ignored_rooms table")
execute_statements_from_stream(cur, StringIO(_constraints_commands))


# NOTE: the existing account data might contain duplicate room IDs, which would
# stop the unique index below from being built; the easiest fix would be to build
# a deduplicated table with the right data and rename it into place.

_create_commands = """
-- Rooms which the given user has chosen to ignore (MSC3840). This data is
-- denormalised from account data.
CREATE TABLE IF NOT EXISTS ignored_rooms(
ignorer_user_id TEXT NOT NULL, -- The user ID of the user who is ignoring the room. (This is a local user.)
ignored_room_id TEXT NOT NULL -- The room ID of the room that is being ignored.
);
"""

_constraints_commands = """
CREATE UNIQUE INDEX ignored_rooms_uniqueness ON ignored_rooms (ignorer_user_id, ignored_room_id);
"""