Lucas Miranda / deepOF / Commits / 4a54a438

Commit 4a54a438 authored Feb 10, 2021 by lucas_miranda
Refactored train_utils.py
parent b51981dd
Pipeline #93127 failed with stage in 73 minutes and 59 seconds
Changes: 2 | Pipelines: 1
deepof/train_utils.py  View file @ 4a54a438
...
@@ -12,6 +12,7 @@ from datetime import date, datetime
from kerastuner import BayesianOptimization, Hyperband
from kerastuner import HyperParameters
from kerastuner_tensorboard_logger import TensorBoardLogger
from sklearn.metrics import roc_auc_score
from tensorboard.plugins.hparams import api as hp
from typing import Tuple, Union, Any, List
import deepof.hypermodels
...
@@ -139,7 +140,7 @@ def get_callbacks(
    return callbacks


-def log_hyperparameters(phenotype_class):
+def log_hyperparameters(phenotype_class: float, rec: str):
    """Blueprint for hyperparameter and metric logging in tensorboard during hyperparameter tuning"""

    logparams = [
...
@@ -163,7 +164,6 @@ def log_hyperparameters(phenotype_class):
        ),
    ]

-    rec = "reconstruction_" if phenotype_class else ""
    metrics = [
        hp.Metric("val_{}mae".format(rec), display_name="val_{}mae".format(rec)),
        hp.Metric("val_{}mse".format(rec), display_name="val_{}mse".format(rec)),
...
@@ -192,7 +192,16 @@ def log_hyperparameters(phenotype_class):
# noinspection PyUnboundLocalVariable
-def tensorboard_metric_logging(run_dir: str, hpms: Any):
+def tensorboard_metric_logging(
+    run_dir: str,
+    hpms: Any,
+    ae: Any,
+    X_val: np.ndarray,
+    y_val: np.ndarray,
+    phenotype_class: float,
+    predictor: float,
+    rec: str,
+):
    """Autoencoder metric logging in tensorboard"""

    output = ae.predict(X_val)
...
@@ -262,7 +271,6 @@ def autoencoder_fitting(
    tf.keras.backend.clear_session()

    # Defines what to log on tensorboard (useful for trying out different models)
    logparam = {
        "encoding": encoding_size,
        "k": n_components,
...
@@ -287,8 +295,9 @@ def autoencoder_fitting(
        cbacks = cbacks[1:]

    # Logs hyperparameters to tensorboard
+    rec = "reconstruction_" if phenotype_class else ""
    if log_hparams:
-        logparams, metrics = log_hyperparameters(phenotype_class)
+        logparams, metrics = log_hyperparameters(phenotype_class, rec)

        with tf.summary.create_file_writer(
            os.path.join(output_path, "hparams", run_ID)
...
@@ -411,6 +420,12 @@ def autoencoder_fitting(
        tensorboard_metric_logging(
            os.path.join(output_path, "hparams", run_ID),
            logparam,
+            ae,
+            X_val,
+            y_val,
+            phenotype_class,
+            predictor,
+            rec,
        )

    return return_list
...
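For context, here is a minimal sketch, not the committed implementation, of how a metric-logging helper with the expanded tensorboard_metric_logging signature can write validation metrics under the val_{rec}mae / val_{rec}mse names that log_hyperparameters declares. The function body, the reconstruction-output handling, and the fixed step value are assumptions for illustration only; only the argument list is taken from the diff above.

# Hedged sketch (assumed body, not the deepof implementation).
import numpy as np
import tensorflow as tf
from tensorboard.plugins.hparams import api as hp


def tensorboard_metric_logging_sketch(
    run_dir, hpms, ae, X_val, y_val, phenotype_class, predictor, rec
):
    output = ae.predict(X_val)
    # With an extra predictor/phenotype head, Keras returns a list of outputs;
    # assume the reconstruction comes first.
    reconstruction = output[0] if isinstance(output, list) else output
    mae = float(np.mean(np.abs(reconstruction - X_val)))
    mse = float(np.mean(np.square(reconstruction - X_val)))
    with tf.summary.create_file_writer(run_dir).as_default():
        hp.hparams(hpms)  # record the hyperparameter values for this trial
        tf.summary.scalar("val_{}mae".format(rec), mae, step=1)
        tf.summary.scalar("val_{}mse".format(rec), mse, step=1)
    # y_val, phenotype_class and predictor would drive additional logging
    # (e.g. ROC AUC via the imported roc_auc_score); omitted in this sketch.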
tests/test_train_utils.py  View file @ 4a54a438
...
@@ -9,12 +9,13 @@ Testing module for deepof.train_utils

"""

from hypothesis import given
+from hypothesis import settings, reproduce_failure
from hypothesis import settings
from hypothesis import strategies as st
from hypothesis.extra.numpy import arrays
import deepof.data
import deepof.model_utils
import deepof.train_utils
import numpy as np
import os
import tensorflow as tf
...
@@ -113,7 +114,8 @@ def test_autoencoder_fitting(
    predictor,
    variational,
):
-    preprocessed_data = (X_train, [], X_train, [])
+    y_train = np.round(np.random.uniform(0, 1, X_train.shape[0]))
+    preprocessed_data = (X_train, y_train, X_train, y_train)

    prun = deepof.data.project(
        path=os.path.join(".", "tests", "test_examples", "test_single_topview"),
...
@@ -131,7 +133,7 @@ def test_autoencoder_fitting(
        log_hparams=True,
        n_components=k,
        loss=loss,
-        phenotype_class=0,
+        phenotype_class=pheno_class,
        predictor=predictor,
        variational=variational,
    )
...
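On the test side, a small illustration of the label-generation pattern the updated fixture relies on: rounding uniform draws yields the random binary phenotype labels that replace the previously empty label lists. The sample count below is hypothetical; the test uses X_train.shape[0].

# Illustration only: random 0/1 phenotype labels like the y_train above.
import numpy as np

n_samples = 100  # hypothetical stand-in for X_train.shape[0]
y_train = np.round(np.random.uniform(0, 1, n_samples))
assert set(np.unique(y_train)) <= {0.0, 1.0}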