Commit b0b65f83 authored by lucas_miranda's avatar lucas_miranda
Browse files

Added encoding size as a CL parameter in train_model.py

parent 486fe222
Pipeline #88511 canceled with stage
in 11 minutes and 26 seconds
......@@ -170,7 +170,7 @@ class SEQ_2_SEQ_GMVAE(HyperModel):
gmvaep, kl_warmup_callback, mmd_warmup_callback = deepof.models.SEQ_2_SEQ_GMVAE(
architecture_hparams={
"bidirectional_merge": "concat",
"bidirectional_merge": "ave",
"clipvalue": clipvalue,
"dense_activation": dense_activation,
"dense_layers_per_branch": dense_layers_per_branch,
......
......@@ -248,9 +248,9 @@ class SEQ_2_SEQ_GMVAE:
entropy_reg_weight: float = 0.0,
huber_delta: float = 1.0,
initialiser_iters: int = int(1),
kl_warmup_epochs: int = 0,
kl_warmup_epochs: int = 20,
loss: str = "ELBO+MMD",
mmd_warmup_epochs: int = 0,
mmd_warmup_epochs: int = 20,
number_of_components: int = 1,
overlap_loss: float = False,
phenotype_prediction: float = 0.0,
......
......@@ -32,12 +32,15 @@ def load_hparams(hparams):
hparams = pickle.load(handle)
else:
hparams = {
"units_conv": 256,
"units_lstm": 256,
"units_dense2": 64,
"dropout_rate": 0.25,
"encoding": 16,
"bidirectional_merge": "ave",
"clipvalue": 1.0,
"dense_activation": "relu",
"dense_layers_per_branch": 1,
"dropout_rate": 1e-3,
"learning_rate": 1e-3,
"units_conv": 160,
"units_dense2": 120,
"units_lstm": 300,
}
return hparams
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment