import torch
import torch.nn as nn


class UstaCausalAttention(nn.Module):
  def __init__(self, embedding_dim, output_dim, context_length, dropout_rate=0):
    super().__init__()
    self.embedding_dim = embedding_dim

    # linear projections that map token embeddings to queries, keys and values
    self.q_weights = nn.Linear(embedding_dim, output_dim, bias=False)
    self.k_weights = nn.Linear(embedding_dim, output_dim, bias=False)
    self.v_weights = nn.Linear(embedding_dim, output_dim, bias=False)
    self.dropout = nn.Dropout(dropout_rate)
    # lower-triangular mask: position i may only attend to positions <= i
    self.register_buffer("mask", torch.tril(torch.ones(context_length, context_length)))
    self.context_length = context_length

  def forward(self, x):
    # truncate the input to the model's context length, then count tokens;
    # counting before truncation would slice a mask larger than the score matrix
    x = x[:self.context_length]
    number_of_tokens = x.shape[0]

    q = self.q_weights(x)
    k = self.k_weights(x)
    v = self.v_weights(x)

    # raw attention scores: similarity of every query with every key
    attention_scores = q @ k.T
    # causal mask: positions above the diagonal get -inf so softmax gives them zero weight
    attention_scores = attention_scores.masked_fill(
      self.mask[:number_of_tokens, :number_of_tokens] == 0, -torch.inf
    )
    # scale by sqrt(key dimension) before softmax, then apply dropout to the weights
    attention_weights = torch.softmax(attention_scores / k.shape[-1] ** 0.5, dim=-1)
    attention_weights = self.dropout(attention_weights)

    # weighted sum of the value vectors: one context vector per token
    return attention_weights @ v
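

# A minimal usage sketch (not part of the original module): the dimensions below
# are illustrative assumptions. The module expects an un-batched input of shape
# (number_of_tokens, embedding_dim) and returns (number_of_tokens, output_dim).
if __name__ == "__main__":
  torch.manual_seed(0)

  attention = UstaCausalAttention(
    embedding_dim=8, output_dim=8, context_length=4, dropout_rate=0.1
  )

  x = torch.randn(4, 8)    # 4 tokens, each an 8-dimensional embedding
  context = attention(x)   # causal self-attention over the 4 tokens
  print(context.shape)     # torch.Size([4, 8])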