Skip to content
Snippets Groups Projects
Commit afb3a322 authored by Lucas Miranda's avatar Lucas Miranda
Browse files

Early stopping now uses the overall loss function instead of the intercomponent overlap

parent 2481cbb8
Branches
Tags
No related merge requests found
...@@ -275,7 +275,7 @@ class Entropy_regulariser(Layer): ...@@ -275,7 +275,7 @@ class Entropy_regulariser(Layer):
Identity layer that adds cluster weight entropy to the loss function Identity layer that adds cluster weight entropy to the loss function
""" """
def __init__(self, weight=0., *args, **kwargs): def __init__(self, weight=1., *args, **kwargs):
self.weight = weight self.weight = weight
super(Entropy_regulariser, self).__init__(*args, **kwargs) super(Entropy_regulariser, self).__init__(*args, **kwargs)
...@@ -292,6 +292,6 @@ class Entropy_regulariser(Layer): ...@@ -292,6 +292,6 @@ class Entropy_regulariser(Layer):
# Adds metric that monitors dead neurons in the latent space # Adds metric that monitors dead neurons in the latent space
self.add_metric(entropy, aggregation="mean", name="-weight_entropy") self.add_metric(entropy, aggregation="mean", name="-weight_entropy")
self.add_loss(self.weight * K.sum(entropy), inputs=[z]) #self.add_loss(self.weight * K.sum(entropy), inputs=[z])
return z return z
0% — Loading failed; please reload.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment