edspdf.layers.sinusoidal_embedding

SinusoidalEmbedding

Bases: torch.nn.Module

A position embedding lookup table that stores embeddings for a fixed number of positions. Each of the embedding_dim channels of the generated embedding is computed with a trigonometric function (sin for even channels, cos for odd channels). The frequency of the signal in each pair of channels varies according to the temperature parameter.

Any input position at or above num_embeddings will be capped to num_embeddings - 1.
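
A minimal usage sketch (the sizes below are illustrative choices, not defaults): the module maps an integer index tensor of any shape to a float tensor with one extra embedding_dim axis.

import torch

from edspdf.layers.sinusoidal_embedding import SinusoidalEmbedding

embedding = SinusoidalEmbedding(num_embeddings=512, embedding_dim=64)

# Indices of any shape work; each index yields one 64-dim vector
positions = torch.tensor([[0, 1, 2], [10, 20, 600]])
output = embedding(positions)
print(output.shape)  # torch.Size([2, 3, 64])
# 600 >= 512, so that position is capped to 511 before the lookup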

Source code in edspdf/layers/sinusoidal_embedding.py
@registry.factory.register("sinusoidal-embedding")
class SinusoidalEmbedding(torch.nn.Module):
    """
    A position embedding lookup table that stores embeddings for a fixed number
    of positions.
    The value of each of the `embedding_dim` channels of the generated embedding
    is generated according to a trigonometric function (sin for even channels,
    cos for odd channels).
    The frequency of the signal in each pair of channels varies according to the
    temperature parameter.

    Any input position above the maximum value `num_embeddings` will be capped to
    `num_embeddings - 1`
    """

    def __init__(
        self,
        num_embeddings: int,
        embedding_dim: int,
        temperature: float = 10000.0,
    ):
        """
        Parameters
        ----------
        num_embeddings: int
            The maximum number of position embeddings stored in this table
        embedding_dim: int
            The embedding size
        temperature: float
            The temperature controls the range of frequencies used by each
            channel of the embedding
        """
        super().__init__()

        self.embedding_dim = embedding_dim
        self.num_embeddings = num_embeddings
        self.temperature = temperature

        weight = torch.zeros(self.num_embeddings, self.embedding_dim)
        position = torch.arange(0, self.num_embeddings, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(
            torch.arange(0, self.embedding_dim, 2).float()
            * (-math.log(self.temperature) / self.embedding_dim)
        )
        weight[:, 0::2] = torch.sin(position * div_term)
        weight[:, 1::2] = torch.cos(position * div_term)
        self.register_buffer("weight", weight)

    def extra_repr(self) -> str:
        return f"{self.num_embeddings}, {self.embedding_dim}"

    def forward(self, indices: torch.LongTensor):
        """
        Forward pass of the SinusoidalEmbedding module

        Parameters
        ----------
        indices: torch.LongTensor
            Shape: ...

        Returns
        -------
        torch.FloatTensor
            Shape: `... * embedding_dim`
        """
        res = F.embedding(indices.clamp(0, len(self.weight) - 1), self.weight)
        return res

__init__(num_embeddings, embedding_dim, temperature=10000.0)

PARAMETER DESCRIPTION

num_embeddings (int)
    The maximum number of position embeddings stored in this table

embedding_dim (int)
    The embedding size

temperature (float, default: 10000.0)
    The temperature controls the range of frequencies used by each channel of the embedding
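
Concretely, the buffer built in __init__ implements the sinusoidal scheme popularised by the Transformer: for a position pos and channel pair index i (with 0 <= 2i < embedding_dim),

weight[pos, 2i]     = sin(pos / temperature ** (2i / embedding_dim))
weight[pos, 2i + 1] = cos(pos / temperature ** (2i / embedding_dim))

so a larger temperature spreads the channel pairs over a wider range of wavelengths. A short check of this reading of div_term (sizes assumed for illustration):

import math

import torch

d, T = 64, 10000.0
two_i = torch.arange(0, d, 2).float()  # the 2i values: 0, 2, 4, ...
div_term = torch.exp(two_i * (-math.log(T) / d))  # as in __init__
assert torch.allclose(div_term, T ** (-two_i / d))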

Source code in edspdf/layers/sinusoidal_embedding.py
def __init__(
    self,
    num_embeddings: int,
    embedding_dim: int,
    temperature: float = 10000.0,
):
    """
    Parameters
    ----------
    num_embeddings: int
        The maximum number of position embeddings stored in this table
    embedding_dim: int
        The embedding size
    temperature: float
        The temperature controls the range of frequencies used by each
        channel of the embedding
    """
    super().__init__()

    self.embedding_dim = embedding_dim
    self.num_embeddings = num_embeddings
    self.temperature = temperature

    weight = torch.zeros(self.num_embeddings, self.embedding_dim)
    position = torch.arange(0, self.num_embeddings, dtype=torch.float).unsqueeze(1)
    div_term = torch.exp(
        torch.arange(0, self.embedding_dim, 2).float()
        * (-math.log(self.temperature) / self.embedding_dim)
    )
    weight[:, 0::2] = torch.sin(position * div_term)
    weight[:, 1::2] = torch.cos(position * div_term)
    self.register_buffer("weight", weight)

forward(indices)

Forward pass of the SinusoidalEmbedding module

PARAMETER DESCRIPTION

indices (torch.LongTensor)
    Shape: ...

RETURNS DESCRIPTION

torch.FloatTensor
    Shape: `... * embedding_dim`
Source code in edspdf/layers/sinusoidal_embedding.py
def forward(self, indices: torch.LongTensor):
    """
    Forward pass of the SinusoidalEmbedding module

    Parameters
    ----------
    indices: torch.LongTensor
        Shape: ...

    Returns
    -------
    torch.FloatTensor
        Shape: `... * embedding_dim`
    """
    res = F.embedding(indices.clamp(0, len(self.weight) - 1), self.weight)
    return res
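
A short sketch of the capping behaviour (toy sizes, assumed for illustration): indices at or beyond num_embeddings all resolve to the last stored embedding.

import torch

from edspdf.layers.sinusoidal_embedding import SinusoidalEmbedding

embedding = SinusoidalEmbedding(num_embeddings=8, embedding_dim=4)

out = embedding(torch.tensor([7, 8, 100]))
# 8 and 100 are clamped to 7 before the lookup, so all rows match
assert torch.equal(out[0], out[1]) and torch.equal(out[1], out[2])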