# EvoAdvisor / evo_model.py
import torch
import torch.nn as nn
import torch.nn.functional as F


class EvoTransformerV22(nn.Module):
    def __init__(self, d_model=384, num_heads=6, ffn_dim=1024, num_layers=6, memory_enabled=False):
        super().__init__()
        # Token embedding sized for the 30522-token BERT WordPiece vocabulary.
        self.embedding = nn.Embedding(30522, d_model)

        # Optional learned memory token that is prepended to every sequence.
        self.memory_enabled = memory_enabled
        self.memory_token = nn.Parameter(torch.zeros(1, 1, d_model)) if memory_enabled else None

        encoder_layer = nn.TransformerEncoderLayer(
            d_model=d_model,
            nhead=num_heads,
            dim_feedforward=ffn_dim,
            batch_first=True
        )
        self.transformer = nn.TransformerEncoder(encoder_layer, num_layers=num_layers)

        # Mean-pool over the sequence dimension, then classify the pooled vector.
        self.pool = nn.AdaptiveAvgPool1d(1)
        self.classifier = nn.Sequential(
            nn.Linear(d_model, 128),
            nn.ReLU(),
            nn.Linear(128, 2)  # Binary classification
        )

    def forward(self, input_ids):
        x = self.embedding(input_ids)  # (batch, seq_len, d_model)
        if self.memory_enabled and self.memory_token is not None:
            # Broadcast the memory token across the batch and prepend it.
            mem = self.memory_token.expand(x.size(0), 1, x.size(2))
            x = torch.cat([mem, x], dim=1)
        x = self.transformer(x)
        x = self.pool(x.transpose(1, 2)).squeeze(-1)  # (batch, d_model)
        return self.classifier(x)
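

# Minimal usage sketch (not part of the original file): a smoke test showing the
# expected input/output shapes. The batch size, sequence length, and random token
# IDs below are assumptions for illustration; in practice input_ids would come
# from a WordPiece tokenizer matching the 30522-token embedding table.
if __name__ == "__main__":
    model = EvoTransformerV22(memory_enabled=True)
    dummy_ids = torch.randint(0, 30522, (2, 16))  # hypothetical (batch=2, seq_len=16) token IDs
    logits = model(dummy_ids)
    print(logits.shape)  # torch.Size([2, 2]) -> two-class logits per example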