From 43136545b3f30551bb9b6665181e9150293d33e4 Mon Sep 17 00:00:00 2001
From: Zihao Ye
Date: Thu, 12 Dec 2024 12:29:20 -0800
Subject: [PATCH] bugfix: fix type annotation error in python3.8 (#656)

As mentioned in https://github.com/flashinfer-ai/flashinfer/issues/653,
there is a type annotation error in cascade.py because we use `list`,
whose use as a subscriptable generic type is only supported since
python 3.9 (related PR
https://github.com/flashinfer-ai/flashinfer/pull/486).
---
 flashinfer/cascade.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/flashinfer/cascade.py b/flashinfer/cascade.py
index 38458bb9..20b91259 100644
--- a/flashinfer/cascade.py
+++ b/flashinfer/cascade.py
@@ -317,10 +317,10 @@ def __init__(
         float_workspace_buffer: torch.Tensor,
         kv_layout: str = "NHD",
         use_cuda_graph: bool = False,
-        qo_indptr_buf_arr: Optional[list[torch.Tensor]] = None,
-        paged_kv_indptr_buf_arr: Optional[list[torch.Tensor]] = None,
-        paged_kv_indices_buf_arr: Optional[list[torch.Tensor]] = None,
-        paged_kv_last_page_len_buf_arr: Optional[list[torch.Tensor]] = None,
+        qo_indptr_buf_arr: Optional[List[torch.Tensor]] = None,
+        paged_kv_indptr_buf_arr: Optional[List[torch.Tensor]] = None,
+        paged_kv_indices_buf_arr: Optional[List[torch.Tensor]] = None,
+        paged_kv_last_page_len_buf_arr: Optional[List[torch.Tensor]] = None,
     ) -> None:
         r"""Constructor of :class:`MultiLevelCascadeAttentionWrapper`.