Commit 0361a1bb authored by lucas_miranda
Browse files

Hyperparameter tuning now fully working in both models!

parent b60427e9
Pipeline #83813 passed with stage
in 44 minutes and 39 seconds
......@@ -62,12 +62,8 @@ class SEQ_2_SEQ_GMVAE(HyperModel):
def __init__(
self,
input_shape,
CONV_filters=256,
DENSE_2=64,
entropy_reg_weight=0.0,
huber_delta=100,
LSTM_units_1=256,
LSTM_units_2=128,
huber_delta=100.0,
kl_warmup_epochs=0,
learn_rate=1e-3,
loss="ELBO+MMD",
......@@ -79,13 +75,8 @@ class SEQ_2_SEQ_GMVAE(HyperModel):
):
super().__init__()
self.input_shape = input_shape
self.CONV_filters = CONV_filters
self.DENSE_1 = LSTM_units_2
self.DENSE_2 = DENSE_2
self.entropy_reg_weight = entropy_reg_weight
self.huber_delta = huber_delta
self.LSTM_units_1 = LSTM_units_1
self.LSTM_units_2 = LSTM_units_2
self.kl_warmup = kl_warmup_epochs
self.kl_warmup_callback = None
self.learn_rate = learn_rate
......
......@@ -264,8 +264,8 @@ input_dict_train = {
print("Preprocessing data...")
preprocessed = batch_preprocess(input_dict_train[input_type])
# Get training and validation sets
X_train = tf.cast(preprocessed[0], tf.float32)
X_val = tf.cast(preprocessed[1], tf.float32)
X_train = preprocessed[0]
X_val = preprocessed[1]
print("Done!")
# Proceed with training mode. Fit autoencoder with the same parameters,
......
......@@ -160,10 +160,10 @@ def tune_search(
overlap_loss=overlap_loss,
)
# if "ELBO" in loss and kl_wu > 0:
# callbacks.append(hypermodel.kl_warmup_callback)
# if "MMD" in loss and mmd_wu > 0:
# callbacks.append(hypermodel.mmd_warmup_callback)
if "ELBO" in loss and kl_wu > 0:
callbacks.append(hypermodel.kl_warmup_callback)
if "MMD" in loss and mmd_wu > 0:
callbacks.append(hypermodel.mmd_warmup_callback)
else:
return False
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment