Do not allow a None-limit on PaginationConfig. (#14146)
The callers either set a default limit or manually handle a None-limit
later on (by setting a default value).

Update the callers to always instantiate PaginationConfig with a default
limit and then assume the limit is non-None.
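For context, a rough sketch of the calling pattern this commit removes versus the one it standardises on (the `store`/`request` names are illustrative, and `default_limit=10` mirrors the REST servlets changed below):

# Before: the limit could be None, so each handler supplied its own fallback.
pagin_config = await PaginationConfig.from_request(store, request)
limit = pagin_config.limit if pagin_config.limit is not None else 10

# After: the default is supplied once, when the config is built, and every
# consumer can assume pagin_config.limit is an int.
pagin_config = await PaginationConfig.from_request(store, request, default_limit=10)
limit = pagin_config.limit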
clokep authored Oct 14, 2022
1 parent c744690 commit 126a157
Showing 16 changed files with 29 additions and 50 deletions.
1 change: 1 addition & 0 deletions changelog.d/14146.removal
@@ -0,0 +1 @@
+Remove the unstable identifier for [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715).
2 changes: 1 addition & 1 deletion synapse/handlers/account_data.py
@@ -225,7 +225,7 @@ async def get_new_events(
self,
user: UserID,
from_key: int,
- limit: Optional[int],
+ limit: int,
room_ids: Collection[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
27 changes: 4 additions & 23 deletions synapse/handlers/initial_sync.py
@@ -57,13 +57,7 @@ def __init__(self, hs: "HomeServer"):
self.validator = EventValidator()
self.snapshot_cache: ResponseCache[
Tuple[
- str,
- Optional[StreamToken],
- Optional[StreamToken],
- str,
- Optional[int],
- bool,
- bool,
+ str, Optional[StreamToken], Optional[StreamToken], str, int, bool, bool
]
] = ResponseCache(hs.get_clock(), "initial_sync_cache")
self._event_serializer = hs.get_event_client_serializer()
@@ -154,11 +148,6 @@ async def _snapshot_all_rooms(

public_room_ids = await self.store.get_public_room_ids()

- if pagin_config.limit is not None:
-     limit = pagin_config.limit
- else:
-     limit = 10

serializer_options = SerializeEventConfig(as_client_event=as_client_event)

async def handle_room(event: RoomsForUser) -> None:
@@ -210,7 +199,7 @@ async def handle_room(event: RoomsForUser) -> None:
run_in_background(
self.store.get_recent_events_for_room,
event.room_id,
- limit=limit,
+ limit=pagin_config.limit,
end_token=room_end_token,
),
deferred_room_state,
@@ -360,15 +349,11 @@ async def _room_initial_sync_parted(
member_event_id
)

- limit = pagin_config.limit if pagin_config else None
- if limit is None:
-     limit = 10

leave_position = await self.store.get_position_for_event(member_event_id)
stream_token = leave_position.to_room_stream_token()

messages, token = await self.store.get_recent_events_for_room(
- room_id, limit=limit, end_token=stream_token
+ room_id, limit=pagin_config.limit, end_token=stream_token
)

messages = await filter_events_for_client(
@@ -420,10 +405,6 @@ async def _room_initial_sync_joined(

now_token = self.hs.get_event_sources().get_current_token()

- limit = pagin_config.limit if pagin_config else None
- if limit is None:
-     limit = 10

room_members = [
m
for m in current_state.values()
@@ -467,7 +448,7 @@ async def get_receipts() -> List[JsonDict]:
run_in_background(
self.store.get_recent_events_for_room,
room_id,
- limit=limit,
+ limit=pagin_config.limit,
end_token=now_token.room_key,
),
),
5 changes: 0 additions & 5 deletions synapse/handlers/pagination.py
@@ -458,11 +458,6 @@ async def get_messages(
# `/messages` should still works with live tokens when manually provided.
assert from_token.room_key.topological is not None

- if pagin_config.limit is None:
-     # This shouldn't happen as we've set a default limit before this
-     # gets called.
-     raise Exception("limit not set")

room_token = from_token.room_key

async with self.pagination_lock.read(room_id):
4 changes: 3 additions & 1 deletion synapse/handlers/presence.py
@@ -1596,7 +1596,9 @@ async def get_new_events(
self,
user: UserID,
from_key: Optional[int],
- limit: Optional[int] = None,
+ # Having a default limit doesn't match the EventSource API, but some
+ # callers do not provide it. It is unused in this class.
+ limit: int = 0,
room_ids: Optional[Collection[str]] = None,
is_guest: bool = False,
explicit_room_id: Optional[str] = None,
2 changes: 1 addition & 1 deletion synapse/handlers/receipts.py
@@ -257,7 +257,7 @@ async def get_new_events(
self,
user: UserID,
from_key: int,
- limit: Optional[int],
+ limit: int,
room_ids: Iterable[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
3 changes: 0 additions & 3 deletions synapse/handlers/relations.py
@@ -116,9 +116,6 @@ async def get_relations(
if event is None:
raise SynapseError(404, "Unknown parent event.")

- # TODO Update pagination config to not allow None limits.
- assert pagin_config.limit is not None

# Note that ignored users are not passed into get_relations_for_event
# below. Ignored users are handled in filter_events_for_client (and by
# not passing them in here we should get a better cache hit rate).
2 changes: 1 addition & 1 deletion synapse/handlers/room.py
@@ -1646,7 +1646,7 @@ async def get_new_events(
self,
user: UserID,
from_key: RoomStreamToken,
- limit: Optional[int],
+ limit: int,
room_ids: Collection[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
2 changes: 1 addition & 1 deletion synapse/handlers/typing.py
@@ -513,7 +513,7 @@ async def get_new_events(
self,
user: UserID,
from_key: int,
- limit: Optional[int],
+ limit: int,
room_ids: Iterable[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
4 changes: 3 additions & 1 deletion synapse/rest/client/events.py
@@ -50,7 +50,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
raise SynapseError(400, "Guest users must specify room_id param")
room_id = parse_string(request, "room_id")

- pagin_config = await PaginationConfig.from_request(self.store, request)
+ pagin_config = await PaginationConfig.from_request(
+     self.store, request, default_limit=10
+ )
timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
if b"timeout" in args:
try:
4 changes: 3 additions & 1 deletion synapse/rest/client/initial_sync.py
@@ -39,7 +39,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
args: Dict[bytes, List[bytes]] = request.args # type: ignore
as_client_event = b"raw" not in args
- pagination_config = await PaginationConfig.from_request(self.store, request)
+ pagination_config = await PaginationConfig.from_request(
+     self.store, request, default_limit=10
+ )
include_archived = parse_boolean(request, "archived", default=False)
content = await self.initial_sync_handler.snapshot_all_rooms(
user_id=requester.user.to_string(),
4 changes: 3 additions & 1 deletion synapse/rest/client/room.py
@@ -729,7 +729,9 @@ async def on_GET(
self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=True)
- pagination_config = await PaginationConfig.from_request(self.store, request)
+ pagination_config = await PaginationConfig.from_request(
+     self.store, request, default_limit=10
+ )
content = await self.initial_sync_handler.room_initial_sync(
room_id=room_id, requester=requester, pagin_config=pagination_config
)
2 changes: 0 additions & 2 deletions synapse/storage/databases/main/stream.py
@@ -1200,8 +1200,6 @@ def _paginate_room_events_txn(
`to_token`), or `limit` is zero.
"""

- assert int(limit) >= 0

# Tokens really represent positions between elements, but we use
# the convention of pointing to the event before the gap. Hence
# we have a bit of asymmetry when it comes to equalities.
2 changes: 1 addition & 1 deletion synapse/streams/__init__.py
@@ -27,7 +27,7 @@ async def get_new_events(
self,
user: UserID,
from_key: K,
- limit: Optional[int],
+ limit: int,
room_ids: Collection[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
12 changes: 5 additions & 7 deletions synapse/streams/config.py
@@ -35,14 +35,14 @@ class PaginationConfig:
from_token: Optional[StreamToken]
to_token: Optional[StreamToken]
direction: str
- limit: Optional[int]
+ limit: int

@classmethod
async def from_request(
cls,
store: "DataStore",
request: SynapseRequest,
- default_limit: Optional[int] = None,
+ default_limit: int,
default_dir: str = "f",
) -> "PaginationConfig":
direction = parse_string(
@@ -69,12 +69,10 @@ async def from_request(
raise SynapseError(400, "'to' parameter is invalid")

limit = parse_integer(request, "limit", default=default_limit)
+ if limit < 0:
+     raise SynapseError(400, "Limit must be 0 or above")

- if limit:
-     if limit < 0:
-         raise SynapseError(400, "Limit must be 0 or above")
-
-     limit = min(int(limit), MAX_LIMIT)
+ limit = min(limit, MAX_LIMIT)

try:
return PaginationConfig(from_tok, to_tok, direction, limit)
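A self-contained sketch of the new limit handling in from_request (standalone for illustration only; the real code uses parse_integer and SynapseError, and the actual value of MAX_LIMIT is defined elsewhere in synapse/streams/config.py, not in this diff):

from typing import Optional

MAX_LIMIT = 1000  # assumption for this sketch; not taken from the diff

def parse_limit(raw: Optional[str], default_limit: int) -> int:
    # Mirrors the post-change behaviour: fall back to the caller-supplied
    # default, reject negatives, and cap at MAX_LIMIT.
    limit = int(raw) if raw is not None else default_limit
    if limit < 0:
        raise ValueError("Limit must be 0 or above")
    return min(limit, MAX_LIMIT)

# parse_limit(None, 10) == 10, parse_limit("5000", 10) == MAX_LIMIT,
# parse_limit("-1", 10) raises, and a limit of 0 passes through unchanged.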
3 changes: 2 additions & 1 deletion tests/rest/client/test_typing.py
@@ -59,7 +59,8 @@ def test_set_typing(self) -> None:
self.event_source.get_new_events(
user=UserID.from_string(self.user_id),
from_key=0,
- limit=None,
+ # Limit is unused.
+ limit=0,
room_ids=[self.room_id],
is_guest=False,
)
