Commit 6a30ddc

Re-remove bidirectional RNN
Repeat of 76a566e 735e1c2 46f2df9
1 parent: 98cbac7

File tree

2 files changed (+10, -30 lines)


examples/conf.yaml

Lines changed: 0 additions & 1 deletion
@@ -55,7 +55,6 @@ data:
 
 model:
     loss_scale_factor: 1.0
-    use_bidirectional: false
     use_batch_norm: false
    torch: False
    shallow: True
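
After this commit, 'use_bidirectional' no longer exists under model: in examples/conf.yaml, so any code that still indexes it directly would raise a KeyError. A minimal sketch of a tolerant read, assuming PyYAML; the .get() fallback is illustrative only and not part of this commit:

    import yaml

    # Load the example configuration shipped with the repository.
    with open('examples/conf.yaml') as f:
        conf = yaml.safe_load(f)

    model_conf = conf['model']
    # .get() with a default tolerates configs from either side of this
    # commit: False once the key is removed, the stored value otherwise.
    use_bidirectional = model_conf.get('use_bidirectional', False)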

plasma/models/builder.py

Lines changed: 10 additions & 29 deletions
@@ -5,7 +5,7 @@
     Dense, Activation, Dropout, Lambda,
     Reshape, Flatten, Permute, # RepeatVector
 )
-from keras.layers import LSTM, SimpleRNN, Bidirectional, BatchNormalization
+from keras.layers import LSTM, SimpleRNN, BatchNormalization
 from keras.layers.convolutional import Convolution1D
 from keras.layers.pooling import MaxPooling1D
 # from keras.utils.data_utils import get_file
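
For reference (not part of this diff): the Bidirectional wrapper whose import is dropped above runs the wrapped RNN over the sequence in both directions and concatenates the two outputs, doubling the feature dimension. A minimal sketch, assuming Keras 2.x; the shapes and sizes are placeholders:

    from keras.layers import Input, LSTM, Bidirectional
    from keras.models import Model

    inp = Input(shape=(100, 14))  # (timesteps, features)
    # Forward and backward passes are concatenated: 2 * 32 = 64 features.
    out = Bidirectional(LSTM(32, return_sequences=True))(inp)
    model = Model(inputs=inp, outputs=out)
    print(model.output_shape)  # (None, 100, 64)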
@@ -82,7 +82,6 @@ def build_model(self, predict, custom_batch_size=None):
         conf = self.conf
         model_conf = conf['model']
         rnn_size = model_conf['rnn_size']
-        use_bidirectional = model_conf['use_bidirectional']
         rnn_type = model_conf['rnn_type']
         regularization = model_conf['regularization']
         dense_regularization = model_conf['dense_regularization']
@@ -249,33 +248,15 @@ def slicer_output_shape(input_shape, indices):
         # pre_rnn_model.summary()
         x_input = Input(batch_shape=batch_input_shape)
         x_in = TimeDistributed(pre_rnn_model)(x_input)
-
-        if use_bidirectional:
-            for _ in range(model_conf['rnn_layers']):
-                x_in = Bidirectional(
-                    rnn_model(
-                        rnn_size,
-                        return_sequences=return_sequences,
-                        stateful=stateful,
-                        kernel_regularizer=l2(regularization),
-                        recurrent_regularizer=l2(regularization),
-                        bias_regularizer=l2(regularization),
-                        dropout=dropout_prob,
-                        recurrent_dropout=dropout_prob))(x_in)
-                x_in = Dropout(dropout_prob)(x_in)
-        else:
-            for _ in range(model_conf['rnn_layers']):
-                x_in = rnn_model(
-                    rnn_size,
-                    return_sequences=return_sequences,
-                    # batch_input_shape=batch_input_shape,
-                    stateful=stateful,
-                    kernel_regularizer=l2(regularization),
-                    recurrent_regularizer=l2(regularization),
-                    bias_regularizer=l2(regularization),
-                    dropout=dropout_prob,
-                    recurrent_dropout=dropout_prob)(x_in)
-                x_in = Dropout(dropout_prob)(x_in)
+        for _ in range(model_conf['rnn_layers']):
+            x_in = rnn_model(
+                rnn_size, return_sequences=return_sequences,
+                # batch_input_shape=batch_input_shape,
+                stateful=stateful, kernel_regularizer=l2(regularization),
+                recurrent_regularizer=l2(regularization),
+                bias_regularizer=l2(regularization), dropout=dropout_prob,
+                recurrent_dropout=dropout_prob)(x_in)
+            x_in = Dropout(dropout_prob)(x_in)
         if return_sequences:
             # x_out = TimeDistributed(Dense(100,activation='tanh')) (x_in)
             x_out = TimeDistributed(
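
For context, a standalone sketch of the surviving code path: the loop above stacks rnn_layers unidirectional RNN layers, each followed by Dropout. Assumes Keras 2.x with LSTM standing in for rnn_model; the concrete sizes below are placeholders, not values from the repository's configuration:

    from keras.layers import Input, LSTM, Dropout, Dense
    from keras.models import Model
    from keras.regularizers import l2

    rnn_layers = 2       # stands in for model_conf['rnn_layers']
    rnn_size = 200       # stands in for model_conf['rnn_size']
    dropout_prob = 0.1
    regularization = 0.001

    # Stateful RNNs need a fixed batch size, hence batch_shape.
    x_input = Input(batch_shape=(1, 128, 14))  # (batch, time, features)
    x_in = x_input
    for _ in range(rnn_layers):
        x_in = LSTM(
            rnn_size, return_sequences=True, stateful=True,
            kernel_regularizer=l2(regularization),
            recurrent_regularizer=l2(regularization),
            bias_regularizer=l2(regularization),
            dropout=dropout_prob, recurrent_dropout=dropout_prob)(x_in)
        x_in = Dropout(dropout_prob)(x_in)
    x_out = Dense(1, activation='sigmoid')(x_in)
    model = Model(inputs=x_input, outputs=x_out)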
