This repository has been archived by the owner on Oct 25, 2024. It is now read-only.

Commit

pylint
Signed-off-by: n1ck-guo <heng.guo@intel.com>
n1ck-guo committed Jun 27, 2024
1 parent 9698230 commit 14f5a6d
Showing 2 changed files with 7 additions and 4 deletions.
@@ -50,7 +50,6 @@
     CausalLMOutputWithPast,
 )

-from intel_extension_for_transformers.transformers.modeling.modeling_gaudi import adapt_transformers_to_gaudi
 from ..prune import PruneConfig, H2OConfig

 logger = logging.get_logger(__name__)
@@ -920,12 +919,12 @@ def _update_causal_mask(
         # When output attentions is True, sdpa implementation's forward method calls the eager implementation's forward
         if self.config._attn_implementation == "sdpa" \
             and not using_static_cache and not output_attentions:  # pylint: disable=E1101
-            if AttentionMaskConverter._ignore_causal_mask_sdpa(
+            if AttentionMaskConverter._ignore_causal_mask_sdpa(  # pylint: disable=E1101
                 attention_mask,
                 inputs_embeds=input_tensor,
                 past_key_values_length=past_seen_tokens,
                 is_training=self.training,
-            ):  # pylint: disable=E1101
+            ):
                 return None

         dtype, device = input_tensor.dtype, input_tensor.device
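
A note on the change above (an inference from how pylint scopes inline disables, not something stated in the commit): E1101 (no-member) is reported on the line that contains the flagged attribute access, so an inline disable only takes effect when it sits on that same line. For a call spread over several lines, that is the opening line with AttentionMaskConverter._ignore_causal_mask_sdpa, not the closing "):" line. A minimal sketch; SomeConverter and _private_check are made-up names for illustration only:

    # pylint reports no-member (E1101) on the line with the attribute access,
    # so an inline disable must sit on that same line of a multi-line call.
    if SomeConverter._private_check(  # pylint: disable=E1101
        attention_mask,
        is_training=False,
    ):
        return None
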
@@ -72,7 +72,11 @@ def get_hh_mask(heavy_budget_ratio, recent_budget_ratio, attn_weights, local=True

         zeros = torch.zeros_like(tmp_sum, dtype=torch.bool)
         mask_bottom = zeros.scatter(-1, tmp_topk, True).unsqueeze(2)
-        mask_bottom = mask_bottom.expand(mask_bottom.shape[0], mask_bottom.shape[1], attn_weights.shape[-2], mask_bottom.shape[-1])
+        mask_bottom = mask_bottom.expand(
+            mask_bottom.shape[0],
+            mask_bottom.shape[1],
+            attn_weights.shape[-2],
+            mask_bottom.shape[-1])
     else:
         mask_bottom = torch.zeros_like(attn_weights, dtype=torch.bool)

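
The second file only rewraps a long line, presumably to satisfy pylint's line-length check (C0301); the arguments are unchanged. torch.Tensor.expand broadcasts singleton dimensions to the requested sizes and returns a view, so the multi-line form behaves identically to the original one-liner. A small sketch with made-up shapes (batch=2, heads=4, seq_len=8) standing in for the real tensors:

    import torch

    # (batch, heads, seq_len) -> unsqueeze(2) gives (batch, heads, 1, seq_len)
    mask_bottom = torch.zeros(2, 4, 8, dtype=torch.bool).unsqueeze(2)
    # expand broadcasts the singleton query dimension without copying data
    mask_bottom = mask_bottom.expand(
        mask_bottom.shape[0],
        mask_bottom.shape[1],
        8,  # stands in for attn_weights.shape[-2]
        mask_bottom.shape[-1])
    # mask_bottom.shape is now torch.Size([2, 4, 8, 8])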
