Commit ccb58202 authored by lucas_miranda's avatar lucas_miranda
Browse files

Implemented SEQ2SEQ_VAEP (Variational AutoEncoding Predictor) in models.py

parent 54b08f18
......@@ -126,9 +126,9 @@ class KLDivergenceLayer(Layer):
mu, log_var = inputs
kl_batch = -0.5 * K.sum(1 + log_var - K.square(mu) - K.exp(log_var), axis=-1)
kL_batch = -0.5 * K.sum(1 + log_var - K.square(mu) - K.exp(log_var), axis=-1)
self.add_loss(K.mean(kl_batch), inputs=inputs)
self.add_loss(K.mean(kL_batch), inputs=inputs)
return inputs
......
......@@ -458,13 +458,19 @@ class SEQ_2_SEQ_VAEP:
x_decoded_mean = TimeDistributed(Dense(self.input_shape[2]))(generator)
# Define and instantiate predictor
predictor = Dense(self.ENCODING, activation='relu', kernel_initializer=he_uniform())(z)
predictor = Dense(
self.ENCODING, activation="relu", kernel_initializer=he_uniform()
)(z)
predictor = BatchNormalization()(predictor)
predictor = Dense(self.DENSE_2, activation='relu', kernel_initializer=he_uniform())(predictor)
predictor = Dense(
self.DENSE_2, activation="relu", kernel_initializer=he_uniform()
)(predictor)
predictor = BatchNormalization()(predictor)
predictor = Dense(self.DENSE_1, activation='relu', kernel_initializer=he_uniform())(predictor)
predictor = Dense(
self.DENSE_1, activation="relu", kernel_initializer=he_uniform()
)(predictor)
predictor = BatchNormalization()(predictor)
predictor = RepeatVector(self.input_shape[1]-1)(predictor)
predictor = RepeatVector(self.input_shape[1] - 1)(predictor)
predictor = Bidirectional(
LSTM(
self.LSTM_units_1,
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment