Commit 3da71241 authored by Lucas Miranda

Parameterised path in main.ipynb

parent 73f69db2
%% Cell type:code id: tags:
``` python
%load_ext autoreload
%autoreload 2
import warnings
warnings.filterwarnings("ignore")
```
%% Cell type:code id: tags:
``` python
#from source.utils import *
from source.preprocess import *
import pickle
import matplotlib.pyplot as plt
import pandas as pd
from collections import defaultdict
from tqdm import tqdm_notebook as tqdm
```
%% Cell type:code id: tags:parameters
``` python
path = "../../Desktop/DLC_social_1/"
```
%% Cell type:markdown id: tags:
# Set up and design the project
%% Cell type:code id: tags:
``` python
with open('{}DLC_social_1_exp_conditions.pickle'.format(path), 'rb') as handle:
    Treatment_dict = pickle.load(handle)
```
%% Cell type:code id: tags:
``` python
# Which angles to compute?
bp_dict = {'B_Nose':['B_Left_ear','B_Right_ear'],
           'B_Left_ear':['B_Nose','B_Right_ear','B_Center','B_Left_flank'],
           'B_Right_ear':['B_Nose','B_Left_ear','B_Center','B_Right_flank'],
           'B_Center':['B_Left_ear','B_Right_ear','B_Left_flank','B_Right_flank','B_Tail_base'],
           'B_Left_flank':['B_Left_ear','B_Center','B_Tail_base'],
           'B_Right_flank':['B_Right_ear','B_Center','B_Tail_base'],
           'B_Tail_base':['B_Center','B_Left_flank','B_Right_flank']}
```
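%% Cell type:markdown id: tags:
The dictionary above specifies, for each body part, which neighbouring parts define the angles to be computed. As a stand-alone illustration of the underlying geometry (a sketch only, not necessarily what `source.preprocess` does internally), the angle at a vertex can be obtained from the normalised dot product of the two connecting vectors; `angle_at` below is a hypothetical helper.
%% Cell type:code id: tags:
``` python
import numpy as np

def angle_at(vertex, p1, p2):
    """Angle (in degrees) formed at `vertex` by the segments towards p1 and p2."""
    v1 = np.asarray(p1) - np.asarray(vertex)
    v2 = np.asarray(p2) - np.asarray(vertex)
    cos_theta = np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
    return np.degrees(np.arccos(np.clip(cos_theta, -1.0, 1.0)))

# Example: angle at the nose between the two ears (toy coordinates)
angle_at([0.0, 0.0], [-1.0, 1.0], [1.0, 1.0])  # 90.0
```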
%% Cell type:code id: tags:
``` python
%%time
DLC_social_1 = project(path=path,  # Path where to find the required files
                       smooth_alpha=0.85,  # Alpha value for exponentially weighted smoothing
                       distances=['B_Center','B_Nose','B_Left_ear','B_Right_ear','B_Left_flank',
                                  'B_Right_flank','B_Tail_base'],
                       ego=False,
                       angles=True,
                       connectivity=bp_dict,
                       arena='circular',  # Type of arena used in the experiments
                       arena_dims=[380],  # Dimensions of the arena. Just one if it's circular
                       video_format='.mp4',
                       table_format='.h5',
                       exp_conditions=Treatment_dict)
```
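%% Cell type:markdown id: tags:
`smooth_alpha` is documented above as the alpha of an exponentially weighted smoothing of the tracked coordinates. As a rough sketch of what such a filter does (whether `project` uses pandas' `ewm` or its own implementation is not shown here), a higher alpha weights recent frames more strongly and therefore smooths less:
%% Cell type:code id: tags:
``` python
# Toy trajectory with a single noisy spike; compare two smoothing strengths
x = pd.Series([0.0, 0.1, 5.0, 0.2, 0.1, 0.0])
print(x.ewm(alpha=0.85).mean().round(3))  # mild smoothing, follows the data closely
print(x.ewm(alpha=0.30).mean().round(3))  # stronger smoothing, the spike is damped more
```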
%% Cell type:markdown id: tags:
# Run project
%% Cell type:code id: tags:
``` python
%%time
DLC_social_1_coords = DLC_social_1.run(verbose=True)
print(DLC_social_1_coords)
type(DLC_social_1_coords)
```
%% Cell type:markdown id: tags:
# Generate coords
%% Cell type:code id: tags:
``` python
%%time
ptest = DLC_social_1_coords.get_coords(center=True, polar=False, speed=0, length='00:10:00')
ptest._type
```
%% Cell type:code id: tags:
``` python
%%time
dtest = DLC_social_1_coords.get_distances(speed=0, length='00:10:00')
dtest._type
```
%% Cell type:code id: tags:
``` python
%%time
atest = DLC_social_1_coords.get_angles(degrees=True, speed=0, length='00:10:00')
atest._type
```
%% Cell type:markdown id: tags:
# Visualization playground
%% Cell type:code id: tags:
``` python
#ptest.plot_heatmaps(['B_Center', 'W_Center'], i=1)
```
%% Cell type:code id: tags:
``` python
#Plot animation of trajectory over time with different smoothings
#plt.plot(ptest['Day2Test13DLC']['B_Center'].iloc[:5000]['x'],
# ptest['Day2Test13DLC']['B_Center'].iloc[:5000]['y'], label='alpha=0.85')
#plt.xlabel('x')
#plt.ylabel('y')
#plt.title('Mouse Center Trajectory using different exponential smoothings')
#plt.legend()
#plt.show()
```
%% Cell type:markdown id: tags:
# Dimensionality reduction playground
%% Cell type:code id: tags:
``` python
#pca = ptest.pca(4, 1000)
```
%% Cell type:code id: tags:
``` python
#plt.scatter(*pca[0].T)
#plt.show()
```
%% Cell type:markdown id: tags:
# Preprocessing playground
%% Cell type:code id: tags:
``` python
mtest = merge_tables(DLC_social_1_coords.get_coords(center=True, polar=True, length='00:10:00'))#,
# DLC_social_1_coords.get_distances(speed=0, length='00:10:00'),
# DLC_social_1_coords.get_angles(degrees=True, speed=0, length='00:10:00'))
```
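%% Cell type:markdown id: tags:
Here `get_coords` is called with `polar=True`, so the merged table holds coordinates in polar rather than Cartesian form. The standard conversion is sketched below for reference (illustrative only; the project's exact conventions, e.g. angle origin or units, may differ).
%% Cell type:code id: tags:
``` python
import numpy as np

def to_polar(x, y):
    """Cartesian (x, y) -> polar (rho, phi), with phi in radians from the positive x axis."""
    return np.hypot(x, y), np.arctan2(y, x)

to_polar(1.0, 1.0)  # (~1.414, ~0.785)
```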
%% Cell type:code id: tags:
``` python
#pptest = mtest.preprocess(window_size=51, filter='gaussian', sigma=10, shift=20)
```
%% Cell type:code id: tags:
``` python
pttest = mtest.preprocess(window_size=51, window_step=10, filter=None)
pttest.shape
```
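%% Cell type:markdown id: tags:
`preprocess(window_size=51, window_step=10, ...)` slices each table into overlapping windows of 51 frames taken every 10 frames, which is why `pttest` is three-dimensional (windows × frames × features). A minimal stand-alone sketch of that kind of windowing (not the library's own implementation):
%% Cell type:code id: tags:
``` python
import numpy as np

def sliding_windows(data, window_size, window_step):
    """Stack overlapping windows of `window_size` rows, advancing `window_step` rows each time."""
    starts = range(0, len(data) - window_size + 1, window_step)
    return np.stack([data[s : s + window_size] for s in starts])

toy = np.arange(200).reshape(100, 2)   # 100 frames, 2 features
sliding_windows(toy, 51, 10).shape     # (5, 51, 2)
```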
%% Cell type:code id: tags:
``` python
#plt.plot(pttest[2,:,2], label='normal')
#plt.plot(pptest[2,:,2], label='gaussian')
#plt.legend()
#plt.show()
```
%% Cell type:markdown id: tags:
# Trained models playground
%% Cell type:markdown id: tags:
### Seq 2 seq Variational Auto Encoder
%% Cell type:code id: tags:
``` python
from datetime import datetime
from tensorflow.keras import Input, Model, Sequential
from tensorflow.keras.constraints import UnitNorm
from tensorflow.keras.layers import Bidirectional, Dense, Dropout
from tensorflow.keras.layers import Lambda, LSTM
from tensorflow.keras.layers import RepeatVector, TimeDistributed
from tensorflow.keras.losses import Huber
from tensorflow.keras.optimizers import Adam
from source.model_utils import *
import keras as k
import tensorflow as tf
```
%% Cell type:code id: tags:
``` python
import os  # make sure os is available for os.path.abspath below

#NAME = 'Baseline_VAE_Unidirectional'
NAME = 'Baseline_AE_GRU'
log_dir = os.path.abspath(
    "logs/fit/{}_{}".format(NAME, datetime.now().strftime("%Y%m%d-%H%M%S"))
)
tensorboard_callback = k.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
```
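%% Cell type:markdown id: tags:
The callback above writes the training curves under `logs/fit/`; they can be followed live with the standard TensorBoard CLI, e.g. `tensorboard --logdir logs/fit`.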
%% Cell type:code id: tags:
``` python
from source.models import SEQ_2_SEQ_AE, SEQ_2_SEQ_VAE
```
%% Cell type:code id: tags:
``` python
#encoder, decoder, ae = SEQ_2_SEQ_AE(pttest.shape).build()
encoder, decoder, ae = SEQ_2_SEQ_AE(pttest.shape).build()
```
%% Cell type:code id: tags:
``` python
encoder, generator, vae = SEQ_2_SEQ_VAE(pttest.shape).build()
#encoder, generator, vae = SEQ_2_SEQ_VAE(pttest.shape).build()
```
%% Cell type:code id: tags:
``` python
#ae.summary()
#vae.summary()
```
%% Cell type:code id: tags:
``` python
pttrain = pttest[:-1500]
pttest = pttest[-1500:]
```
%% Cell type:code id: tags:
``` python
tf.config.experimental_run_functions_eagerly(False)
#history = vae.fit(pttrain, pttrain, epochs=50, batch_size=256, verbose=1, validation_data=(pttest, pttest),
#                  callbacks=[tensorboard_callback])
history = ae.fit(pttrain, pttrain, epochs=50, batch_size=256, verbose=1, validation_data=(pttest, pttest),
                 callbacks=[tensorboard_callback])
```
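%% Cell type:markdown id: tags:
The `History` object returned by `fit` stores per-epoch losses in `history.history`; a quick way to compare training and validation loss outside TensorBoard (assuming the training cell above has been run) is sketched below.
%% Cell type:code id: tags:
``` python
plt.plot(history.history["loss"], label="train")
plt.plot(history.history["val_loss"], label="validation")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
plt.show()
```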
%% Cell type:code id: tags:
``` python
```
Changes to the SEQ_2_SEQ model definitions in the same commit:

``` diff
@@ -3,7 +3,7 @@
 from tensorflow.keras import Input, Model, Sequential
 from tensorflow.keras.constraints import UnitNorm
 from tensorflow.keras.layers import Bidirectional, Dense, Dropout
-from tensorflow.keras.layers import Lambda, LSTM
+from tensorflow.keras.layers import Lambda, LSTM, GRU
 from tensorflow.keras.layers import RepeatVector, TimeDistributed
 from tensorflow.keras.losses import Huber
 from tensorflow.keras.optimizers import Adam
@@ -42,16 +42,16 @@ class SEQ_2_SEQ_AE:
             padding="causal",
             activation="relu",
         )
-        Model_E1 = (
-            LSTM(
+        Model_E1 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="tanh",
                 return_sequences=True,
                 kernel_constraint=UnitNorm(axis=0),
             )
         )
-        Model_E2 = (
-            LSTM(
+        Model_E2 = Bidirectional(
+            GRU(
                 self.LSTM_units_2,
                 activation="tanh",
                 return_sequences=False,
@@ -72,16 +72,16 @@ class SEQ_2_SEQ_AE:
         )
 
         # Decoder layers
-        Model_D4 = (
-            LSTM(
+        Model_D4 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="tanh",
                 return_sequences=True,
                 kernel_constraint=UnitNorm(axis=1),
             )
         )
-        Model_D5 = (
-            LSTM(
+        Model_D5 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="sigmoid",
                 return_sequences=True,
@@ -158,16 +158,16 @@ class SEQ_2_SEQ_VAE:
             padding="causal",
             activation="relu",
         )
-        Model_E1 = (
-            LSTM(
+        Model_E1 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="tanh",
                 return_sequences=True,
                 kernel_constraint=UnitNorm(axis=0),
             )
         )
-        Model_E2 = (
-            LSTM(
+        Model_E2 = Bidirectional(
+            GRU(
                 self.LSTM_units_2,
                 activation="tanh",
                 return_sequences=False,
@@ -193,16 +193,16 @@ class SEQ_2_SEQ_VAE:
         Model_D1 = DenseTranspose(Model_E4, activation="relu", output_dim=self.DENSE_2)
         Model_D2 = DenseTranspose(Model_E3, activation="relu", output_dim=self.DENSE_1)
         Model_D3 = RepeatVector(self.input_shape[1])
-        Model_D4 = (
-            LSTM(
+        Model_D4 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="tanh",
                 return_sequences=True,
                 kernel_constraint=UnitNorm(axis=1),
             )
         )
-        Model_D5 = (
-            LSTM(
+        Model_D5 = Bidirectional(
+            GRU(
                 self.LSTM_units_1,
                 activation="sigmoid",
                 return_sequences=True,
@@ -280,8 +280,8 @@ class SEQ_2_SEQ_MMVAE:
 # TODO:
 # - Baseline networks (done!)
 # - Initial Convnet switch (done!)
-# - Bidirectional LSTM switches
+# - Bidirectional LSTM switches (done!)
 # - Change LSTMs for GRU
-# - Tied/Untied weights
 # - VAE loss function (though this should be analysed later on taking the encodings into account)
+# - Tied/Untied weights!
 # - Smaller input sliding window (10-15 frames)
```
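A note on the layer swap above: by default `Bidirectional` concatenates the forward and backward passes, so a wrapped `GRU(units)` produces an output twice as wide as `units`. A minimal, self-contained Keras sketch with toy shapes (not the repository's model code):

``` python
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Bidirectional, GRU

inp = Input(shape=(51, 26))                               # (frames, features); toy dimensions
out = Bidirectional(GRU(32, return_sequences=True))(inp)
print(Model(inp, out).output_shape)                       # (None, 51, 64): forward + backward states concatenated
```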