forked from facebookresearch/xformers
sine.py
46 lines (36 loc) · 1.33 KB
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.


# Silence Mypy errors in this file.
# type: ignore

import math

import torch

from xformers.components.positional_embedding import (
    PositionEmbedding,
    PositionEmbeddingConfig,
    register_positional_embedding,
)


@register_positional_embedding("sine", PositionEmbeddingConfig)
class SinePositionalEmbedding(PositionEmbedding):
    def __init__(self, dim_model: int, *args, **kwargs):
        super().__init__()
        self.dim_model = dim_model

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        seq_len = x.shape[1]

        # Position indices repeated across the embedding dimension:
        # pos[i, j] = i, shape (seq_len, dim_model).
        pos = (
            torch.arange(0, seq_len, device=x.device, dtype=torch.float32)
            .unsqueeze(1)
            .repeat(1, self.dim_model)
        )

        # Dimension indices repeated across positions:
        # dim[i, j] = j, shape (seq_len, dim_model).
        dim = (
            torch.arange(0, self.dim_model, device=x.device, dtype=torch.float32)
            .unsqueeze(0)
            .repeat(seq_len, 1)
        )

        # Frequency term 1 / 10000^(2 * floor(j / 2) / dim_model) from
        # "Attention Is All You Need"; each even/odd pair of dimensions
        # shares one frequency.
        div = torch.exp(-math.log(10000) * (2 * (dim // 2) / self.dim_model))
        pos *= div

        # Sine on even dimensions, cosine on odd dimensions.
        pos[:, 0::2] = torch.sin(pos[:, 0::2])
        pos[:, 1::2] = torch.cos(pos[:, 1::2])

        # For 2D (batch, seq_len) inputs, add a trailing dim so broadcasting
        # against (seq_len, dim_model) yields (batch, seq_len, dim_model).
        output = x.unsqueeze(-1) if x.ndim == 2 else x

        # Broadcast the (1, seq_len, dim_model) embedding over the batch.
        return output + pos.unsqueeze(0)
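
A minimal usage sketch (not part of the original file): it instantiates the embedding directly and adds it to a dummy batch. The import path and the tensor shapes are assumptions chosen for illustration; within xformers the class would normally be constructed through the positional-embedding registry rather than imported directly.

# Usage sketch, assuming this file is importable as sine.py.
import torch

from sine import SinePositionalEmbedding  # hypothetical local import path

emb = SinePositionalEmbedding(dim_model=64)
x = torch.zeros(2, 16, 64)  # assumed (batch, seq_len, dim_model) input
y = emb(x)                  # same shape, with sinusoidal offsets added
assert y.shape == (2, 16, 64)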