# @authors lucasmiranda42
# encoding: utf-8
# deepof_experiments

"""

Snakefile for the DeepOF model training experiments (coarse hyperparameter tuning and exploration of encoding dimension, cluster number and loss function).
Execution: sbatch snakemake
Plot DAG: snakemake --snakefile deepof_experiments.smk --forceall --dag | dot -Tpdf > deepof_experiments_DAG.pdf
Plot rule graph: snakemake --snakefile deepof_experiments.smk --forceall --rulegraph | dot -Tpdf > deepof_experiments_RULEGRAPH.pdf

"""

import os

# Root directory where trained weights and logs are written.
outpath = "/u/lucasmir/DLC/DLC_autoencoders/DeepOF/deepof/logs/"

# Hyperparameter grids explored by the rules below.
losses = ["ELBO", "MMD", "ELBO+MMD"]
encodings = [2, 4, 6, 8, 10, 12, 14, 16]
cluster_numbers = [i + 1 for i in range(10)]


# Target rule: collect the trained weights produced by the rules below.
rule deepof_experiments:
    input:
        expand(
            "/u/lucasmir/DLC/DLC_autoencoders/DeepOF/deepof/logs/dimension_and_loss_experiments/trained_weights/"
            "GMVAE_loss={loss}_encoding={encs}_k={k}_run_1_final_weights.h5",
            loss=losses,
            encs=encodings,
            k=cluster_numbers,
        ),
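# Note: expand() enumerates the full Cartesian product of the grids defined at
# the top of the file (3 losses x 8 encoding sizes x 10 cluster numbers), i.e.
# 240 weight files such as "GMVAE_loss=ELBO_encoding=2_k=1_run_1_final_weights.h5".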


# Coarse Hyperband search over the GMVAE hyperparameters, parametrized by loss
# function and encoding size.
rule coarse_hyperparameter_tuning:
    input:
        data_path="/u/lucasmir/DLC/DLC_models/deepof_single_topview/",
    output:
        trained_models=os.path.join(
            outpath,
            "coarse_hyperparameter_tuning/trained_weights/GMVAE_loss={loss}_encoding={encs}_run_1_final_weights.h5",
        ),
    shell:
        "pipenv run python -m deepof.train_model "
        "--train-path {input.data_path} "
        "--val-num 25 "
        "--components 10 "
        "--input-type coords "
        "--predictor 0 "
        "--variational True "
        "--loss {wildcards.loss} "
        "--kl-warmup 20 "
        "--mmd-warmup 20 "
        "--encoding-size 2 "
        "--batch-size 256 "
        "--window-size 11 "
        "--window-step 11 "
        "--exclude-bodyparts Tail_base,Tail_1,Tail_2,Tail_tip,Spine_2 "
        "--output-path {outpath}coarse_hyperparameter_tuning "
        "--hyperparameter-tuning hyperband "
        "--hpt-trials 3"


# Train one GMVAE per (loss, encoding size, number of components) combination.
rule explore_encoding_dimension_and_loss_function:
    input:
        data_path="/u/lucasmir/DLC/DLC_models/deepof_single_topview/",
    output:
        trained_models=os.path.join(
            outpath,
            "dimension_and_loss_experiments/trained_weights/GMVAE_loss={loss}_encoding={encs}_k={k}_run_1_final_weights.h5",
        ),
    shell:
        "pipenv run python -m deepof.train_model "
        "--train-path {input.data_path} "
        "--val-num 10 "
        "--components {wildcards.k} "
        "--input-type coords "
        "--predictor 0 "
        "--variational True "
        "--loss {wildcards.loss} "
        "--kl-warmup 20 "
        "--mmd-warmup 20 "
        "--encoding-size {wildcards.encs} "
        "--batch-size 256 "
        "--window-size 11 "
        "--window-step 11 "
        "--exclude-bodyparts Tail_base,Tail_1,Tail_2,Tail_tip,Spine_2 "
        "--stability-check 3 "
        "--output-path {outpath}dimension_and_loss_experiments"


# rule explore_number_of_components:
# 	input:
# 	output:
# 	shell:
# rule explore_phenotype_prediction:
# 	input:
# 	output:
# 	shell:
# rule explore_predictor_branch:
# 	input:
# 	output:
# 	shell: