Skip to content

Commit

Permalink
Use the same feature map for q and k
Browse files · Browse the repository at this point in the history
  • Loading branch information
blefaudeux committed Jan 18, 2022
1 parent 60e94e5 commit f3b613c
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions xformers/components/attention/favor.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,7 @@ def __init__(
"normalize_inputs": self.normalize_inputs,
}

self.feature_map_query: FeatureMap = feature_map_constructor(**feature_settings) # type: ignore
self.feature_map_key: FeatureMap = feature_map_constructor(**feature_settings) # type: ignore
self.feature_map: FeatureMap = feature_map_constructor(**feature_settings) # type: ignore

@staticmethod
def _maybe_promote(x: torch.Tensor) -> torch.Tensor:
Expand Down Expand Up @@ -135,8 +134,8 @@ def forward(
):

# Project key and queries onto the feature map space
k_prime = self.feature_map_key(k)
q_prime = self.feature_map_query(q)
k_prime = self.feature_map(k)
q_prime = self.feature_map(q)

with autocast(enabled=False):
# The softmax kernel approximation for Favor will easily overflow
Expand Down

0 comments on commit f3b613c

Please sign in to comment.