# @authors lucasmiranda42
# encoding: utf-8
# deepof_experiments

"""

Snakefile for data and imputation.
Execution: sbatch snakemake
Plot DAG: snakemake --snakefile deepof_experiments.smk --forceall --dag | dot -Tpdf > deepof_experiments_DAG.pdf
Plot rule graph: snakemake --snakefile deepof_experiments.smk --forceall --rulegraph | dot -Tpdf > deepof_experiments_RULEGRAPH.pdf

"""

import os

# Base directory where all training logs and model weights are written.
outpath = "/u/lucasmir/DLC/DLC_autoencoders/DeepOF/deepof/logs/"

# Loss functions to sweep over; the MMD variants are currently disabled.
losses = ["ELBO"]  # , "MMD", "ELBO+MMD"]

# Latent-space (encoding) dimensionalities to explore in the sweep.
encodings = [2, 4, 6, 8, 10, 12, 14, 16]
# Number of GMM components (clusters) to explore in the sweep.
cluster_numbers = [1, 5, 10, 15, 20]
# Target rule: requests the final trained weights for every combination of
# loss function, encoding dimension, and cluster number in the sweep, so
# running the workflow triggers explore_encoding_dimension_and_loss_function
# for each missing combination.
rule deepof_experiments:
    input:
        expand(
            # Built from `outpath` (was a duplicated hard-coded literal of the
            # same value) to stay consistent with the training rule's output.
            outpath
            + "dimension_and_loss_experiments/trained_weights/"
            "GMVAE_loss={loss}_encoding={encs}_k={k}_run_1_final_weights.h5",
            loss=losses,
            encs=encodings,
            k=cluster_numbers,
        ),


# rule coarse_hyperparameter_tuning:
#     input:
#         data_path="/u/lucasmir/DLC/DLC_models/deepof_single_topview/",
#     output:
#         trained_models=os.path.join(
#             outpath,
#             "coarse_hyperparameter_tuning/trained_weights/GMVAE_loss={loss}_encoding={encs}_run_1_final_weights.h5",
#         ),
#     shell:
#         "pipenv run python -m deepof.train_model "
#         "--train-path {input.data_path} "
#         "--val-num 25 "
#         "--components 10 "
#         "--input-type coords "
#         "--predictor 0 "
#         "--variational True "
#         "--loss {wildcards.loss} "
#         "--kl-warmup 20 "
#         "--mmd-warmup 20 "
#         "--encoding-size 2 "
#         "--batch-size 256 "
#         "--window-size 11 "
#         "--window-step 11 "
#         "--output-path {outpath}coarse_hyperparameter_tuning "
#         "--hyperparameter-tuning hyperband "
#         "--hpt-trials 3"
# Trains one GMVAE per (loss, encoding size, cluster number) wildcard
# combination by shelling out to deepof.train_model, writing the final
# weights under <outpath>/dimension_and_loss_experiments/trained_weights/.
rule explore_encoding_dimension_and_loss_function:
    input:
        data_path="/u/lucasmir/DLC/DLC_models/deepof_single_topview/",
    output:
        trained_models=os.path.join(
            outpath,
            "dimension_and_loss_experiments/trained_weights/GMVAE_loss={loss}_encoding={encs}_k={k}_run_1_final_weights.h5",
        ),
    shell:
        "pipenv run python -m deepof.train_model "
        "--train-path {input.data_path} "
        "--val-num 5 "
        "--components {wildcards.k} "
        "--input-type coords "
        "--predictor 0 "
        "--variational True "
        "--loss {wildcards.loss} "
        "--kl-warmup 20 "
        "--mmd-warmup 20 "
        "--montecarlo-kl 10 "
        "--encoding-size {wildcards.encs} "
        "--batch-size 256 "
        "--window-size 11 "
        "--window-step 11 "
        "--stability-check 3  "
        "--output-path {outpath}dimension_and_loss_experiments"