diff --git a/source/utils.py b/source/utils.py
index 1d02602e27ab213aeb8802ec15b2fd6aa20b56a7..b85655493adedc2d9488be208968418d18bf2a7e 100644
--- a/source/utils.py
+++ b/source/utils.py
@@ -23,11 +23,21 @@ from tqdm import tqdm_notebook as tqdm
 # QUALITY CONTROL AND PREPROCESSING #
 
 
-def likelihood_qc(dframe, threshold=0.9):
-    """Returns only rows where all lilelihoods are above a specified threshold"""
+def likelihood_qc(dframe: pd.DataFrame, threshold: float = 0.9) -> np.array:
+    """Returns a boolean mask marking the rows of a DataFrame in which all likelihoods are above the specified threshold.
+
+        Parameters:
+            - dframe (pandas.DataFrame): DeepLabCut output, with positions over time and associated likelihood
+            - threshold (float): minimum acceptable confidence
+
+        Returns:
+            - filt_mask (np.array): boolean mask over the rows of dframe"""
+
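+    # Stack the likelihood column of every body part into a (n_bodyparts, n_frames) array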
     Likes = np.array([dframe[i]["likelihood"] for i in list(dframe.columns.levels[0])])
     Likes = np.nan_to_num(Likes, nan=1.0)
-    return np.all(Likes > threshold, axis=0)
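+    # A frame passes QC only if every body part's likelihood is above the threshold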
+    filt_mask = np.all(Likes > threshold, axis=0)
+
+    return filt_mask
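+
+# Minimal usage sketch for likelihood_qc (the variable names below are illustrative, not part of this module):
+#   mask = likelihood_qc(tracking_df, threshold=0.8)   # one boolean per frame
+#   tracking_df = tracking_df.loc[mask]                 # keep only confidently tracked frames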
 
 
 def bp2polar(tab: pd.DataFrame) -> pd.DataFrame:
@@ -244,10 +254,9 @@ def rolling_window(a: np.array, window_size: int, window_step: int) -> np.array:
 def smooth_mult_trajectory(series: np.array, alpha: float = 0.15) -> np.array:
     """Returns a smooths a trajectory using exponentially weighted averages
 
-        Parameters:
-            - series (np.array): 2D trajectory array with N (instances) * m (features)
-            - alpha (float): 0 <= alpha <= 1; indicates the weight assigned to the current observation.
-            higher (alpha~1) indicates less smoothing; lower indicates more (alpha~0)
+        Parameters:
+            - series (np.array): 1D trajectory array with N (instances)
+            - alpha (float): 0 <= alpha <= 1; inverse weight assigned to previous observations. Higher (alpha~1) indicates less smoothing; lower (alpha~0) indicates more
 
         Returns:
             - smoothed_series (np.array): smoothed version of the input, with equal shape"""
@@ -260,7 +269,8 @@ def smooth_mult_trajectory(series: np.array, alpha: float = 0.15) -> np.array:
 
     return smoothed_series
 
-    ##### IMAGE/VIDEO PROCESSING FUNCTIONS #####
+
+# IMAGE/VIDEO PROCESSING FUNCTIONS #
 
 
 def index_frames(video_list, sample=False, index=0, pkl=False):
@@ -286,7 +296,8 @@ def index_frames(video_list, sample=False, index=0, pkl=False):
 
     return True
 
-    ##### BEHAVIOUR RECOGNITION FUNCTIONS #####
+
+# BEHAVIOUR RECOGNITION FUNCTIONS #
 
 
 # Nose to Nose contact
diff --git a/test_deepof/test_utils.py b/test_deepof/test_utils.py
index 69459ccba27f5ca9424fe02ef23dadddefdfa8fa..012cd816fac2aa116924211f0ef36c6c7ea0e950 100644
--- a/test_deepof/test_utils.py
+++ b/test_deepof/test_utils.py
@@ -4,14 +4,54 @@ import sys
 
 sys.path.append("../")
 from hypothesis import given
+from hypothesis import settings
 from hypothesis import strategies as st
 from hypothesis.extra.numpy import arrays
 from hypothesis.extra.pandas import range_indexes, columns, data_frames
-from itertools import combinations
 from scipy.spatial import distance
 from source.utils import *
 
 
+@given(
+    mult=st.integers(min_value=1, max_value=10),
+    dframe=data_frames(
+        index=range_indexes(min_size=1),
+        columns=columns(["X", "y", "likelihood"], dtype=float),
+        rows=st.tuples(
+            st.floats(
+                min_value=0, max_value=1000, allow_nan=False, allow_infinity=False
+            ),
+            st.floats(
+                min_value=0, max_value=1000, allow_nan=False, allow_infinity=False
+            ),
+            st.floats(
+                min_value=0.01, max_value=1.0, allow_nan=False, allow_infinity=False
+            ),
+        ),
+    ),
+    threshold=st.data(),
+)
+def test_likelihood_qc(mult, dframe, threshold):
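+    # Draw two thresholds with thresh2 >= thresh1, so the stricter filter can be compared against the looser one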
+    thresh1 = threshold.draw(st.floats(min_value=0.1, max_value=1.0, allow_nan=False))
+    thresh2 = threshold.draw(
+        st.floats(min_value=thresh1, max_value=1.0, allow_nan=False)
+    )
+
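+    # Replicate the rows and rebuild the columns as a (bodyparts, coords) MultiIndex, mimicking DeepLabCut output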
+    dframe = pd.concat([dframe] * mult, axis=0)
+    idx = pd.MultiIndex.from_product(
+        [list(dframe.columns[: len(dframe.columns) // 3]), ["X", "y", "likelihood"]],
+        names=["bodyparts", "coords"],
+    )
+    dframe.columns = idx
+
+    filt1 = likelihood_qc(dframe, thresh1)
+    filt2 = likelihood_qc(dframe, thresh2)
+
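+    # The mask can never select more rows than exist, and the stricter threshold keeps at most as many rows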
+    assert np.sum(filt1) <= dframe.shape[0]
+    assert np.sum(filt2) <= dframe.shape[0]
+    assert np.sum(filt1) >= np.sum(filt2)
+
+
 @given(
     tab=data_frames(
         index=range_indexes(min_size=1),
@@ -119,7 +159,6 @@ def test_bpart_distance(cordarray):
     ),
 )
 def test_angle(abc):
-
     a, b, c = abc
 
     angles = []
@@ -164,7 +203,6 @@ def test_angle_trio(array):
     )
 )
 def test_rotate(p):
-
     assert np.allclose(rotate(p, 2 * np.pi), p)
     assert np.allclose(rotate(p, np.pi), -p)
 
@@ -195,7 +233,10 @@ def test_align_trajectories(data):
 @given(a=arrays(dtype=bool, shape=st.tuples(st.integers(min_value=3, max_value=1000))))
 def test_smooth_boolean_array(a):
     smooth = smooth_boolean_array(a)
-    trans = lambda x: sum([i + 1 != i for i in range(x.shape[0] - 1)])
+
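+    # Count value changes between consecutive elements; smoothing should never add transitions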
+    def trans(x):
+        return sum([x[i + 1] != x[i] for i in range(x.shape[0] - 1)])
+
     assert trans(a) >= trans(smooth)
 
 
@@ -222,3 +263,37 @@ def test_rolling_window(a, window):
 
     assert len(rolled_shape) == len(a.shape) + 1
     assert rolled_shape[1] == window_size
+
+
+@settings(deadline=None)
+@given(
+    alpha=st.data(),
+    series=arrays(
+        dtype=float,
+        shape=st.tuples(st.integers(min_value=10, max_value=1000),),
+        elements=st.floats(
+            min_value=1.0, max_value=1.0, allow_nan=False, allow_infinity=False
+        ),
+    ),
+)
+def test_smooth_mult_trajectory(alpha, series):
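+    # Draw two smoothing factors with alpha2 >= alpha1, so alpha2 yields the weaker smoothing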
+    alpha1 = alpha.draw(
+        st.floats(min_value=0.1, max_value=1.0, allow_nan=False, allow_infinity=False)
+    )
+    alpha2 = alpha.draw(
+        st.floats(
+            min_value=alpha1, max_value=1.0, allow_nan=False, allow_infinity=False
+        )
+    )
+
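+    # Start from a constant series and inject Gaussian noise so there is something to smooth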
+    series *= np.random.normal(0, 1, len(series))
+
+    smoothed1 = smooth_mult_trajectory(series, alpha1)
+    smoothed2 = smooth_mult_trajectory(series, alpha2)
+
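+    # Lag-1 autocorrelation: smoothing should raise it, and the less-smoothed series (higher alpha) should raise it less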
+    def autocorr(x, t=1):
+        return np.corrcoef(np.array([x[:-t], x[t:]]))[0, 1]
+
+    assert autocorr(smoothed1) >= autocorr(series)
+    assert autocorr(smoothed2) >= autocorr(series)
+    assert autocorr(smoothed2) <= autocorr(smoothed1)