danielle2003 committed on
Commit
fff6ecb
·
verified ·
1 Parent(s): 6d16885

Update app.py

Files changed (1)
  1. app.py +5 -12
app.py CHANGED
@@ -126,21 +126,14 @@ class GRUModel(nn.Module):
         return self.fc(out[:, -1, :])
 
 class BiLSTMModel(nn.Module):
-    def __init__(self, input_dim=1, hidden_dim=100, num_layers=2, output_dim=1, dropout_prob=0.2):
+    def __init__(self):
         super(BiLSTMModel, self).__init__()
-        self.lstm = nn.LSTM(
-            input_size=input_dim,
-            hidden_size=hidden_dim,
-            num_layers=num_layers,
-            batch_first=True,
-            dropout=dropout_prob,
-            bidirectional=True
-        )
-        self.fc = nn.Linear(hidden_dim * 2, output_dim)  # because bidirectional
+        self.lstm = nn.LSTM(input_size=1, hidden_size=100, num_layers=2, batch_first=True, dropout=0.2, bidirectional=True)
+        self.fc = nn.Linear(200, 1)
 
     def forward(self, x):
-        h0 = torch.zeros(self.lstm.num_layers * 2, x.size(0), self.lstm.hidden_size).to(x.device)
-        c0 = torch.zeros(self.lstm.num_layers * 2, x.size(0), self.lstm.hidden_size).to(x.device)
+        h0 = torch.zeros(4, x.size(0), 100)  # 2 directions × 2 layers = 4
+        c0 = torch.zeros(4, x.size(0), 100)
         out, _ = self.lstm(x, (h0, c0))
         return self.fc(out[:, -1, :])
 @st.cache_resource(ttl=3600)
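The commit inlines the previous constructor arguments as literals: hidden_size=100 with bidirectional=True gives 200 features per time step, hence nn.Linear(200, 1), and num_layers=2 × 2 directions gives the leading dimension of 4 for h0/c0. Below is a minimal, self-contained sanity check of the committed class, assuming only torch is installed; the batch size and sequence length are hypothetical values chosen to exercise the shapes, not taken from app.py.

import torch
import torch.nn as nn

class BiLSTMModel(nn.Module):
    def __init__(self):
        super(BiLSTMModel, self).__init__()
        self.lstm = nn.LSTM(input_size=1, hidden_size=100, num_layers=2,
                            batch_first=True, dropout=0.2, bidirectional=True)
        self.fc = nn.Linear(200, 1)  # 2 directions × hidden_size 100 = 200

    def forward(self, x):
        # (num_layers × num_directions, batch, hidden) = (4, batch, 100)
        h0 = torch.zeros(4, x.size(0), 100)
        c0 = torch.zeros(4, x.size(0), 100)
        out, _ = self.lstm(x, (h0, c0))     # out: (batch, seq_len, 200)
        return self.fc(out[:, -1, :])       # last time step -> (batch, 1)

# Hypothetical smoke test: 8 sequences, 30 time steps, 1 feature each.
x = torch.randn(8, 30, 1)
model = BiLSTMModel().eval()
with torch.no_grad():
    y = model(x)
print(y.shape)  # torch.Size([8, 1])

One consequence of the change worth noting: h0 and c0 are now created with torch.zeros and no device argument, so they live on the CPU. The removed .to(x.device) calls were what made the previous version device-agnostic; the committed version assumes CPU inference, which is typical for a Streamlit app.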