Commit e858fc6e authored by lucas_miranda's avatar lucas_miranda
Browse files

Modified GMVAEP - GRUs instead of LSTMs, stricter clipping, less deep, l1...

Modified GMVAEP - GRUs instead of LSTMs, stricter clipping, less deep, l1 regularization in cluster means, uniform initializer of variances
parent 425530fc
Pipeline #102893 passed with stages
in 21 minutes and 32 seconds
...@@ -232,61 +232,6 @@ class one_cycle_scheduler(tf.keras.callbacks.Callback): ...@@ -232,61 +232,6 @@ class one_cycle_scheduler(tf.keras.callbacks.Callback):
) )
class uncorrelated_features_constraint(Constraint):
    """
    tf.keras.constraints.Constraint subclass that forces a layer to have uncorrelated features.
    Useful, among others, for auto encoder bottleneck layers
    """

    def __init__(self, encoding_dim, weightage=1.0):
        """Stores the penalty configuration.

        Args:
            encoding_dim: number of features (columns) of the constrained output.
            weightage: scale factor applied to the decorrelation penalty.
        """
        super().__init__()
        self.encoding_dim = encoding_dim
        self.weightage = weightage

    def get_config(self):  # pragma: no cover
        """Updates Constraint metadata"""
        config = super().get_config().copy()
        config.update({"encoding_dim": self.encoding_dim, "weightage": self.weightage})
        return config

    def get_covariance(self, x):
        """Computes the covariance of the elements of the passed layer.

        Vectorized: centers every column of x at once instead of looping over
        features. NOTE: the normalizer is the number of features (encoding_dim),
        matching the original implementation, not the batch size.
        """
        # (encoding_dim, batch) matrix of per-feature centered values
        x_centered = K.transpose(x - K.mean(x, axis=0))
        covariance = K.dot(x_centered, K.transpose(x_centered)) / tf.cast(
            self.encoding_dim, tf.float32
        )
        return covariance

    # Constraint penalty
    def uncorrelated_feature(self, x, covariance=None):
        """Adds a penalty on feature correlation, forcing more independent sets of weights.

        Args:
            x: the layer output to penalize.
            covariance: optional precomputed covariance of x. When None it is
                computed from x, so this method no longer depends on __call__
                having been invoked first (the original read self.covariance,
                which only existed after a prior __call__).
        """
        if self.encoding_dim <= 1:  # pragma: no cover
            return 0.0
        if covariance is None:
            covariance = self.get_covariance(x)
        # Sum of squared off-diagonal covariance entries
        output = K.sum(
            K.square(
                covariance - tf.math.multiply(covariance, tf.eye(self.encoding_dim))
            )
        )
        return output

    def __call__(self, x):
        # self.covariance is kept as an attribute for backward compatibility
        # with any external code inspecting it after a call.
        self.covariance = self.get_covariance(x)
        return self.weightage * self.uncorrelated_feature(x, covariance=self.covariance)
# Custom Layers # Custom Layers
class MCDropout(tf.keras.layers.Dropout): class MCDropout(tf.keras.layers.Dropout):
"""Equivalent to tf.keras.layers.Dropout, but with training mode enabled at prediction time. """Equivalent to tf.keras.layers.Dropout, but with training mode enabled at prediction time.
......
...@@ -139,38 +139,6 @@ def test_one_cycle_scheduler(): ...@@ -139,38 +139,6 @@ def test_one_cycle_scheduler():
assert onecycle.history["lr"][4] > onecycle.history["lr"][-1] assert onecycle.history["lr"][4] > onecycle.history["lr"][-1]
# noinspection PyUnresolvedReferences
def test_uncorrelated_features_constraint():
    """Checks that the decorrelation penalty reduces average feature correlation.

    Trains two otherwise-identical single-layer models — one with the penalty
    disabled (weightage=0) and one with it enabled (weightage=1) — and asserts
    that the penalized model ends with less-correlated kernel rows. RNGs are
    seeded so the comparison is deterministic rather than flaky.
    """
    # Fix both NumPy (data) and TF (weight init / training) randomness;
    # the original unseeded version could fail intermittently.
    np.random.seed(42)
    tf.random.set_seed(42)

    X = np.random.uniform(0, 10, [1500, 5])
    y = np.random.randint(0, 2, [1500, 1])

    correlations = []

    for w in range(2):
        test_model = tf.keras.Sequential()
        test_model.add(
            tf.keras.layers.Dense(
                10,
                kernel_constraint=tf.keras.constraints.UnitNorm(axis=1),
                activity_regularizer=deepof.model_utils.uncorrelated_features_constraint(
                    2, weightage=w
                ),
            )
        )

        test_model.compile(
            loss=tf.keras.losses.binary_crossentropy,
            optimizer=tf.keras.optimizers.SGD(),
        )

        fit = test_model.fit(X, y, epochs=25, batch_size=100, verbose=0)
        assert isinstance(fit, tf.keras.callbacks.History)

        # Mean pairwise correlation of the kernel rows
        correlations.append(np.mean(np.corrcoef(test_model.get_weights()[0])))

    # The penalized model (w=1) should be less correlated than the baseline (w=0)
    assert correlations[0] > correlations[1]
# noinspection PyUnresolvedReferences # noinspection PyUnresolvedReferences
def test_MCDropout(): def test_MCDropout():
X = np.random.uniform(0, 10, [1500, 5]) X = np.random.uniform(0, 10, [1500, 5])
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment