Skip to content
Snippets Groups Projects
Commit 2481cbb8 authored by Lucas Miranda's avatar Lucas Miranda
Browse files

Early stopping now uses the overall loss function instead of the inter-component overlap

parent ae848b4f
No related branches found
No related tags found
No related merge requests found
...@@ -275,7 +275,7 @@ class Entropy_regulariser(Layer): ...@@ -275,7 +275,7 @@ class Entropy_regulariser(Layer):
Identity layer that adds cluster weight entropy to the loss function Identity layer that adds cluster weight entropy to the loss function
""" """
def __init__(self, weight=1., *args, **kwargs): def __init__(self, weight=0., *args, **kwargs):
self.weight = weight self.weight = weight
super(Entropy_regulariser, self).__init__(*args, **kwargs) super(Entropy_regulariser, self).__init__(*args, **kwargs)
...@@ -290,7 +290,7 @@ class Entropy_regulariser(Layer): ...@@ -290,7 +290,7 @@ class Entropy_regulariser(Layer):
) )
# Adds metric that monitors dead neurons in the latent space # Adds metric that monitors dead neurons in the latent space
self.add_metric(-entropy, aggregation="mean", name="weight_entropy") self.add_metric(entropy, aggregation="mean", name="-weight_entropy")
self.add_loss(self.weight * K.sum(entropy), inputs=[z]) self.add_loss(self.weight * K.sum(entropy), inputs=[z])
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment