Update tasks/text.py
Browse files — tasks/text.py (+2 −3)
tasks/text.py
CHANGED
@@ -129,8 +129,7 @@ async def evaluate_text(request: TextEvaluationRequest):
         # score shape == (batch_size, max_length, 1)
         # we get 1 at the last axis because we are applying score to self.V
         # the shape of the tensor before applying self.V is (batch_size, max_length, units)
-        score = tf.nn.tanh(
-            self.W1(features) + self.W2(hidden_with_time_axis))
+        score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis))

         # attention_weights shape == (batch_size, max_length, 1)
         attention_weights = tf.nn.softmax(self.V(score), axis=1)
@@ -151,7 +150,7 @@ async def evaluate_text(request: TextEvaluationRequest):

     (lstm, forward_h, forward_c, backward_h, backward_c) = Bidirectional(LSTM(RNN_CELL_SIZE, return_sequences=True, return_state=True), name="bi_lstm_1")(lstm)

-
+    state_h = Concatenate()([forward_h, backward_h])
     state_c = Concatenate()([forward_c, backward_c])

     context_vector, attention_weights = Attention(10)(lstm, state_h)