deepOF · Commit 71d92b26
authored Jun 05, 2020 by lucas_miranda
Implemented KL and MMD warmup on SEQ2SEQ_VAEP in models.py
parent a3a6bdcb
Changes 1
source/model_utils.py
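The commit implements KL and MMD warm-up: both latent regularizers are scaled by a weight beta that is annealed from 0 to 1 over the first training epochs, so the autoencoder learns to reconstruct before the regularization pressure kicks in. A minimal sketch of such a schedule, assuming beta is a Keras backend variable shared with the layers changed below (the callback and its names are illustrative, not the repository's actual code):

import tensorflow.keras.backend as K
from tensorflow.keras.callbacks import Callback

# Shared weight, passed as beta to KLDivergenceLayer / MMDiscrepancyLayer
beta = K.variable(0.0, name="beta")

class WarmupCallback(Callback):
    # Linearly anneals beta from 0 to 1 over warmup_epochs epochs
    # (hypothetical helper; deepOF's actual schedule may differ).
    def __init__(self, beta, warmup_epochs):
        super().__init__()
        self.beta = beta
        self.warmup_epochs = warmup_epochs

    def on_epoch_begin(self, epoch, logs=None):
        K.set_value(self.beta, min(1.0, epoch / self.warmup_epochs))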
@@ -130,9 +130,9 @@ class KLDivergenceLayer(Layer):
     def call(self, inputs, **kwargs):
         mu, log_var = inputs
-        kL_batch = -0.5 * K.sum(1 + log_var - K.square(mu) - K.exp(log_var), axis=-1)
+        kL_batch = -0.5 * self.beta * K.sum(1 + log_var - K.square(mu) - K.exp(log_var), axis=-1)
-        self.add_loss(self.beta * K.mean(kL_batch), inputs=inputs)
+        self.add_loss(K.mean(kL_batch), inputs=inputs)
         self.add_metric(self.beta, aggregation="mean", name="kl_rate")
         return inputs
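The change moves the warm-up weight into the batch KL term itself, so add_loss now registers the already-weighted value while add_metric still reports the current beta as kl_rate. For context, a sketch of how such a layer fits together, assuming beta is passed in at construction time (only call appears in the diff; the constructor is an assumption):

import tensorflow.keras.backend as K
from tensorflow.keras.layers import Layer

class KLDivergenceLayer(Layer):
    # Identity layer that adds the beta-weighted KL divergence between
    # q(z|x) and a standard normal prior as an activity regularizer.
    def __init__(self, beta=1.0, *args, **kwargs):
        self.beta = beta  # scalar or K.variable updated by a warm-up callback
        super().__init__(*args, **kwargs)

    def call(self, inputs, **kwargs):
        mu, log_var = inputs
        # Per-sample KL(q(z|x) || N(0, I)), scaled by the warm-up weight
        kL_batch = -0.5 * self.beta * K.sum(
            1 + log_var - K.square(mu) - K.exp(log_var), axis=-1
        )
        self.add_loss(K.mean(kL_batch), inputs=inputs)
        self.add_metric(self.beta, aggregation="mean", name="kl_rate")
        return inputs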
@@ -155,9 +155,9 @@ class MMDiscrepancyLayer(Layer):
     def call(self, z, **kwargs):
         true_samples = K.random_normal(K.shape(z))
-        mmd_batch = compute_mmd(true_samples, z)
+        mmd_batch = self.beta * compute_mmd(true_samples, z)
-        self.add_loss(self.beta * K.mean(mmd_batch), inputs=z)
+        self.add_loss(K.mean(mmd_batch), inputs=z)
         self.add_metric(self.beta, aggregation="mean", name="mmd_rate")
         return z
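The same pattern is applied to the MMD term: beta now scales mmd_batch directly and add_loss registers the weighted mean, with the current beta exposed as the mmd_rate metric. compute_mmd itself is defined elsewhere in model_utils.py; a common Gaussian-kernel implementation in the style used by MMD-VAEs looks like this (a sketch under that assumption, not necessarily the repository's exact code):

import tensorflow.keras.backend as K

def compute_kernel(x, y):
    # Pairwise Gaussian (RBF) kernel between two batches of samples.
    x_size, y_size, dim = K.shape(x)[0], K.shape(y)[0], K.shape(x)[1]
    tiled_x = K.tile(K.reshape(x, K.stack([x_size, 1, dim])), K.stack([1, y_size, 1]))
    tiled_y = K.tile(K.reshape(y, K.stack([1, y_size, dim])), K.stack([x_size, 1, 1]))
    return K.exp(-K.mean(K.square(tiled_x - tiled_y), axis=2) / K.cast(dim, "float32"))

def compute_mmd(x, y):
    # Squared MMD estimate: E[k(x,x)] + E[k(y,y)] - 2 E[k(x,y)].
    return (
        K.mean(compute_kernel(x, x))
        + K.mean(compute_kernel(y, y))
        - 2 * K.mean(compute_kernel(x, y))
    )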