Commit 6ab6fbef authored by lucas_miranda

Added tests to test_utils.py

parent 57bd2164
@@ -23,11 +23,21 @@ from tqdm import tqdm_notebook as tqdm

 # QUALITY CONTROL AND PREPROCESSING #


-def likelihood_qc(dframe, threshold=0.9):
-    """Returns only rows where all likelihoods are above a specified threshold"""
+def likelihood_qc(dframe: pd.DataFrame, threshold: float = 0.9) -> np.array:
+    """Returns a boolean mask over the rows of dframe, keeping only those rows
+    whose likelihoods are all above the specified threshold.
+
+    Parameters:
+        - dframe (pandas.DataFrame): DeepLabCut output, with positions over time and the associated likelihood
+        - threshold (float): minimum acceptable confidence
+
+    Returns:
+        - filt_mask (np.array): mask on the rows of dframe"""
+
     Likes = np.array([dframe[i]["likelihood"] for i in list(dframe.columns.levels[0])])
     Likes = np.nan_to_num(Likes, nan=1.0)
-    return np.all(Likes > threshold, axis=0)
+    filt_mask = np.all(Likes > threshold, axis=0)
+
+    return filt_mask


 def bp2polar(tab: pd.DataFrame) -> pd.DataFrame:
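Editor's note: for reference, a minimal sketch of how the returned mask might be applied to a DeepLabCut-style table. The body part names and values below are invented for illustration, and likelihood_qc is assumed to be importable as in the tests (from source.utils):

import numpy as np
import pandas as pd
from source.utils import likelihood_qc  # assumes the repository root is on sys.path

# Hypothetical two-body-part tracking table with the (bodyparts, coords) column layout.
cols = pd.MultiIndex.from_product(
    [["nose", "tail_base"], ["X", "y", "likelihood"]], names=["bodyparts", "coords"]
)
tracking = pd.DataFrame(
    [[10.0, 20.0, 0.99, 30.0, 40.0, 0.95],
     [11.0, 21.0, 0.40, 31.0, 41.0, 0.97]],
    columns=cols,
)

mask = likelihood_qc(tracking, threshold=0.9)  # -> array([ True, False])
confident_rows = tracking.loc[mask]  # keeps only rows where every body part is confidently tracked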
@@ -244,10 +254,9 @@ def rolling_window(a: np.array, window_size: int, window_step: int) -> np.array:

 def smooth_mult_trajectory(series: np.array, alpha: float = 0.15) -> np.array:
     """Smooths a trajectory using exponentially weighted averages

     Parameters:
-        - series (np.array): 2D trajectory array with N (instances) * m (features)
-        - alpha (float): 0 <= alpha <= 1; indicates the weight assigned to the current observation.
-          higher (alpha~1) indicates less smoothing; lower indicates more (alpha~0)
+        - series (numpy.array): 1D trajectory array with N (instances)
+        - alpha (float): 0 <= alpha <= 1; indicates the inverse weight assigned to previous observations.
+          Higher (alpha~1) indicates less smoothing; lower indicates more (alpha~0)

     Returns:
         - smoothed_series (np.array): smoothed version of the input, with equal shape"""
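Editor's note: the exponential weighting described in this docstring can be spelled out as a recurrence. A rough sketch of that recurrence (my own illustration; the committed smooth_mult_trajectory may be implemented differently, e.g. via pandas' ewm):

import numpy as np

def ewm_sketch(series: np.ndarray, alpha: float = 0.15) -> np.ndarray:
    # s[t] = alpha * x[t] + (1 - alpha) * s[t - 1]
    # alpha close to 1 tracks the raw signal (little smoothing);
    # alpha close to 0 averages over a long history (heavy smoothing).
    smoothed = np.empty_like(series, dtype=float)
    smoothed[0] = series[0]
    for t in range(1, len(series)):
        smoothed[t] = alpha * series[t] + (1 - alpha) * smoothed[t - 1]
    return smoothed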
@@ -260,7 +269,8 @@ def smooth_mult_trajectory(series: np.array, alpha: float = 0.15) -> np.array:

     return smoothed_series


-##### IMAGE/VIDEO PROCESSING FUNCTIONS #####
+# IMAGE/VIDEO PROCESSING FUNCTIONS #


 def index_frames(video_list, sample=False, index=0, pkl=False):
@@ -286,7 +296,8 @@ def index_frames(video_list, sample=False, index=0, pkl=False):

     return True


-##### BEHAVIOUR RECOGNITION FUNCTIONS #####
+# BEHAVIOUR RECOGNITION FUNCTIONS #


 # Nose to Nose contact
...
@@ -4,14 +4,54 @@ import sys

 sys.path.append("../")

 from hypothesis import given
+from hypothesis import settings
 from hypothesis import strategies as st
 from hypothesis.extra.numpy import arrays
 from hypothesis.extra.pandas import range_indexes, columns, data_frames
+from itertools import combinations
 from scipy.spatial import distance
 from source.utils import *
+
+
+@given(
+    mult=st.integers(min_value=1, max_value=10),
+    dframe=data_frames(
+        index=range_indexes(min_size=1),
+        columns=columns(["X", "y", "likelihood"], dtype=float),
+        rows=st.tuples(
+            st.floats(
+                min_value=0, max_value=1000, allow_nan=False, allow_infinity=False
+            ),
+            st.floats(
+                min_value=0, max_value=1000, allow_nan=False, allow_infinity=False
+            ),
+            st.floats(
+                min_value=0.01, max_value=1.0, allow_nan=False, allow_infinity=False
+            ),
+        ),
+    ),
+    threshold=st.data(),
+)
+def test_likelihood_qc(mult, dframe, threshold):
+    thresh1 = threshold.draw(st.floats(min_value=0.1, max_value=1.0, allow_nan=False))
+    thresh2 = threshold.draw(
+        st.floats(min_value=thresh1, max_value=1.0, allow_nan=False)
+    )
+
+    dframe = pd.concat([dframe] * mult, axis=0)
+    idx = pd.MultiIndex.from_product(
+        [list(dframe.columns[: len(dframe.columns) // 3]), ["X", "y", "likelihood"]],
+        names=["bodyparts", "coords"],
+    )
+    dframe.columns = idx
+
+    filt1 = likelihood_qc(dframe, thresh1)
+    filt2 = likelihood_qc(dframe, thresh2)
+
+    assert np.sum(filt1) <= dframe.shape[0]
+    assert np.sum(filt2) <= dframe.shape[0]
+    assert np.sum(filt1) >= np.sum(filt2)


 @given(
     tab=data_frames(
         index=range_indexes(min_size=1),
@@ -119,7 +159,6 @@ def test_bpart_distance(cordarray):
     ),
 )
 def test_angle(abc):
     a, b, c = abc

     angles = []
@@ -164,7 +203,6 @@ def test_angle_trio(array):
     )
 )
 def test_rotate(p):
     assert np.allclose(rotate(p, 2 * np.pi), p)
     assert np.allclose(rotate(p, np.pi), -p)
@@ -195,7 +233,10 @@ def test_align_trajectories(data):

 @given(a=arrays(dtype=bool, shape=st.tuples(st.integers(min_value=3, max_value=1000))))
 def test_smooth_boolean_array(a):
     smooth = smooth_boolean_array(a)
-    trans = lambda x: sum([i + 1 != i for i in range(x.shape[0] - 1)])
+
+    def trans(x):
+        return sum([x[i + 1] != x[i] for i in range(x.shape[0] - 1)])
+
     assert trans(a) >= trans(smooth)
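Editor's note: the property asserted here is that smoothing never increases the number of True/False flips. A vectorised sketch of the same transition count, written independently of the commit:

import numpy as np

def count_transitions(x: np.ndarray) -> int:
    # Number of positions where consecutive boolean values differ.
    return int(np.sum(x[1:] != x[:-1]))

noisy = np.array([True, False, True, True, False, True])
steady = np.array([True, True, True, True, True, True])  # e.g. output of a smoother
assert count_transitions(steady) <= count_transitions(noisy)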
@@ -222,3 +263,37 @@ def test_rolling_window(a, window):

     assert len(rolled_shape) == len(a.shape) + 1
     assert rolled_shape[1] == window_size
+
+
+@settings(deadline=None)
+@given(
+    alpha=st.data(),
+    series=arrays(
+        dtype=float,
+        shape=st.tuples(st.integers(min_value=10, max_value=1000),),
+        elements=st.floats(
+            min_value=1.0, max_value=1.0, allow_nan=False, allow_infinity=False
+        ),
+    ),
+)
+def test_smooth_mult_trajectory(alpha, series):
+    alpha1 = alpha.draw(
+        st.floats(min_value=0.1, max_value=1.0, allow_nan=False, allow_infinity=False)
+    )
+    alpha2 = alpha.draw(
+        st.floats(
+            min_value=alpha1, max_value=1.0, allow_nan=False, allow_infinity=False
+        )
+    )
+
+    series *= np.random.normal(0, 1, len(series))
+
+    smoothed1 = smooth_mult_trajectory(series, alpha1)
+    smoothed2 = smooth_mult_trajectory(series, alpha2)
+
+    def autocorr(x, t=1):
+        return np.corrcoef(np.array([x[:-t], x[t:]]))[0, 1]
+
+    assert autocorr(smoothed1) >= autocorr(series)
+    assert autocorr(smoothed2) >= autocorr(series)
+    assert autocorr(smoothed2) <= autocorr(smoothed1)
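Editor's note: for intuition about the last three assertions, the lag-1 autocorrelation comparison can be reproduced in isolation. A quick sketch using pandas' ewm as a stand-in smoother (an assumption; this commit only shows the name smooth_mult_trajectory, not its implementation):

import numpy as np
import pandas as pd

def lag1_autocorr(x: np.ndarray) -> float:
    # Pearson correlation between the series and itself shifted by one step.
    return np.corrcoef(np.array([x[:-1], x[1:]]))[0, 1]

rng = np.random.default_rng(42)
noise = rng.normal(0, 1, 500)

# Exponentially weighted smoothing with two alphas; smaller alpha smooths more heavily.
heavy = pd.Series(noise).ewm(alpha=0.2).mean().to_numpy()
light = pd.Series(noise).ewm(alpha=0.8).mean().to_numpy()

assert lag1_autocorr(heavy) >= lag1_autocorr(noise)
assert lag1_autocorr(light) >= lag1_autocorr(noise)
assert lag1_autocorr(heavy) >= lag1_autocorr(light)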