Commit 76597d55 authored by lucas_miranda

Implemented Monte Carlo Dropout custom layer

parent 7b3e74ce
@@ -56,6 +56,10 @@ def compute_mmd(tensors):
 # Custom layers for efficiency/losses
+class MCDropout(tf.keras.layers.Dropout):
+    def call(self, inputs):
+        return super().call(inputs, training=True)
+
 class DenseTranspose(Layer):
     def __init__(self, dense, output_dim, activation=None, **kwargs):
         self.dense = dense
......
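For context, a minimal sketch (assuming TensorFlow 2.x; the toy tensor below is illustrative and not from this repository) of what the subclass changes: by forcing training=True inside call, a fresh dropout mask is sampled on every forward pass, even when the layer is invoked at inference time.

import tensorflow as tf

# Sketch of the layer added above: forcing training=True means dropout
# stays active on every call, inference included.
class MCDropout(tf.keras.layers.Dropout):
    def call(self, inputs):
        return super().call(inputs, training=True)

x = tf.ones((1, 6))

drop = tf.keras.layers.Dropout(0.5)
mc_drop = MCDropout(0.5)

# A standard Dropout layer is the identity at inference time...
print(drop(x))     # all ones
# ...whereas MCDropout keeps dropping units and rescaling survivors.
print(mc_drop(x))  # random zeros, kept units scaled by 1/(1 - rate)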
@@ -7,7 +7,7 @@ from tensorflow.keras.callbacks import LambdaCallback
 from tensorflow.keras.constraints import UnitNorm
 from tensorflow.keras.initializers import he_uniform, Orthogonal
 from tensorflow.keras.layers import BatchNormalization, Bidirectional
-from tensorflow.keras.layers import Dense, Dropout, LSTM
+from tensorflow.keras.layers import Dense, LSTM
 from tensorflow.keras.layers import RepeatVector, Reshape, TimeDistributed
 from tensorflow.keras.losses import Huber
 from tensorflow.keras.optimizers import Nadam
@@ -125,7 +125,7 @@ class SEQ_2_SEQ_AE:
         encoder.add(BatchNormalization())
         encoder.add(Model_E3)
         encoder.add(BatchNormalization())
-        encoder.add(Dropout(self.DROPOUT_RATE))
+        encoder.add(MCDropout(self.DROPOUT_RATE))
         encoder.add(Model_E4)
         encoder.add(BatchNormalization())
         encoder.add(Model_E5)
@@ -312,7 +312,7 @@ class SEQ_2_SEQ_GMVAE:
         encoder = BatchNormalization()(encoder)
         encoder = Model_E3(encoder)
         encoder = BatchNormalization()(encoder)
-        encoder = Dropout(self.DROPOUT_RATE)(encoder)
+        encoder = MCDropout(self.DROPOUT_RATE)(encoder)
         encoder = Model_E4(encoder)
         encoder = BatchNormalization()(encoder)
......
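A hedged sketch of why this swap matters downstream: with MCDropout in the encoders, repeated forward passes at prediction time yield a distribution over outputs whose spread can be read as model uncertainty (Monte Carlo Dropout). The tiny regressor below is hypothetical, standing in for the patched encoders only to illustrate the sampling loop.

import numpy as np
import tensorflow as tf

class MCDropout(tf.keras.layers.Dropout):
    def call(self, inputs):
        return super().call(inputs, training=True)

# Hypothetical toy model (not from this repository).
model = tf.keras.Sequential([
    tf.keras.Input(shape=(8,)),
    tf.keras.layers.Dense(16, activation="relu"),
    MCDropout(0.25),
    tf.keras.layers.Dense(1),
])

x = np.random.rand(4, 8).astype("float32")

# Each pass samples a new dropout mask, so predictions vary; the mean is
# a point estimate and the standard deviation an uncertainty proxy.
samples = np.stack([model(x).numpy() for _ in range(100)])
print("mean:", samples.mean(axis=0).ravel())
print("std: ", samples.std(axis=0).ravel())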