| author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:13:54 +0200 |
|---|---|---|
| committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:13:54 +0200 |
| commit | fb90a53b1235fd836dee74452f3f2a621e0f363a (patch) | |
| tree | daae44aa5e7c1309a41a059594ce0c3fc92cbc26 /text_recognizer/networks/transformer/embeddings/absolute.py | |
| parent | 8c7a59d58e2ce6b18384c9fcdba2fd49e5450b0e (diff) | |
Rename transformer embeddings
Diffstat (limited to 'text_recognizer/networks/transformer/embeddings/absolute.py')
| -rw-r--r-- | text_recognizer/networks/transformer/embeddings/absolute.py | 17 |
1 file changed, 17 insertions, 0 deletions
diff --git a/text_recognizer/networks/transformer/embeddings/absolute.py b/text_recognizer/networks/transformer/embeddings/absolute.py
new file mode 100644
index 0000000..7140537
--- /dev/null
+++ b/text_recognizer/networks/transformer/embeddings/absolute.py
@@ -0,0 +1,17 @@
+"""Absolute positional embedding."""
+import torch
+from torch import nn, Tensor
+
+
+class AbsolutePositionalEmbedding(nn.Module):
+    def __init__(self, dim: int, max_seq_len: int) -> None:
+        super().__init__()
+        self.emb = nn.Embedding(max_seq_len, dim)
+        self._weight_init()
+
+    def _weight_init(self) -> None:
+        nn.init.normal_(self.emb.weight, std=0.02)
+
+    def forward(self, x: Tensor) -> Tensor:
+        n = torch.arange(x.shape[1], device=x.device)
+        return self.emb(n)[None, :, :]
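For context, a minimal usage sketch (not part of the commit) of how `AbsolutePositionalEmbedding` would typically be combined with a token embedding: the module returns a `(1, seq_len, dim)` tensor of learned positions that broadcasts over the batch dimension. The `vocab_size`, `dim`, `max_seq_len`, and tensor shapes below are illustrative assumptions, not values from this repository.

```python
import torch
from torch import nn

from text_recognizer.networks.transformer.embeddings.absolute import (
    AbsolutePositionalEmbedding,
)

# Illustrative hyperparameters (assumptions, not taken from the repo).
vocab_size, dim, max_seq_len = 1000, 256, 128

token_emb = nn.Embedding(vocab_size, dim)
pos_emb = AbsolutePositionalEmbedding(dim=dim, max_seq_len=max_seq_len)

tokens = torch.randint(0, vocab_size, (2, 64))  # (batch, seq_len)
x = token_emb(tokens)                           # (2, 64, 256)
x = x + pos_emb(x)                              # + (1, 64, 256), broadcast over batch
```

Note that `forward` only reads `x.shape[1]` and `x.device`, so the positional embedding is independent of the token values; sequences longer than `max_seq_len` would index past the embedding table and raise an error.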