From 5feea183753cb1fd17e4377258a97b816272275c Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 18:03:25 +0100 Subject: [PATCH 01/14] Add filter with nan under threshold and varying window --- tests/test_unit/test_filtering.py | 47 +++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/tests/test_unit/test_filtering.py b/tests/test_unit/test_filtering.py index 1bc59348..66810d65 100644 --- a/tests/test_unit/test_filtering.py +++ b/tests/test_unit/test_filtering.py @@ -233,6 +233,53 @@ def _assert_n_nans_in_position_per_individual( ) +@pytest.mark.parametrize( + "valid_dataset_with_nan", + list_valid_datasets_with_nans, +) +@pytest.mark.parametrize( + "window", + [3, 5, 6, 10], # data is nframes = 10 +) +@pytest.mark.parametrize( + "filter_func", + [median_filter, savgol_filter], +) +def test_filter_with_nans_on_position_varying_window( + valid_dataset_with_nan, window, filter_func, helpers, request +): + """Test that the number of NaNs in the filtered position data + increases at most by the filter's window length minus one + multiplied by the number of consecutive NaNs in the input data. + """ + # Prepare kwargs per filter + kwargs = {"window": window} + if filter_func == savgol_filter: + kwargs["polyorder"] = 2 + + # Filter position + valid_input_dataset = request.getfixturevalue(valid_dataset_with_nan) + position_filtered = filter_func( + valid_input_dataset.position, + **kwargs, + ) + + # Count number of NaNs in the input and filtered position data + n_total_nans_initial = helpers.count_nans(valid_input_dataset.position) + n_consecutive_nans_initial = helpers.count_consecutive_nans( + valid_input_dataset.position + ) + + n_total_nans_filtered = helpers.count_nans(position_filtered) + + max_nans_increase = (window - 1) * n_consecutive_nans_initial + + # Check that filtering does not reduce number of nans + assert n_total_nans_filtered >= n_total_nans_initial + # Check that the increase in nans is below the expected threshold + assert n_total_nans_filtered - n_total_nans_initial <= max_nans_increase + + @pytest.mark.parametrize( "valid_dataset", list_all_valid_datasets, From e507b485db0fda62dd58cc1f733b6e9e53da8701 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 18:54:44 +0100 Subject: [PATCH 02/14] Get kinematics tests --- tests/conftest.py | 134 ++++++++++++++- tests/test_unit/test_kinematics.py | 262 ++++++++++++++++++----------- 2 files changed, 291 insertions(+), 105 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f2c77bed..e2e27652 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -237,8 +237,9 @@ def valid_bboxes_arrays_all_zeros(): # --------------------- Bboxes dataset fixtures ---------------------------- +# uniform linear motion @pytest.fixture -def valid_bboxes_array(): +def valid_bboxes_arrays(): """Return a dictionary of valid non-zero arrays for a ValidBboxesDataset. @@ -276,22 +277,22 @@ def valid_bboxes_array(): "position": position, "shape": shape, "confidence": confidence, - "individual_names": ["id_" + str(id) for id in range(n_individuals)], } +# uniform linear motion @pytest.fixture def valid_bboxes_dataset( - valid_bboxes_array, + valid_bboxes_arrays, ): """Return a valid bboxes dataset with low confidence values and time in frames. 
""" dim_names = MovementDataset.dim_names["bboxes"] - position_array = valid_bboxes_array["position"] - shape_array = valid_bboxes_array["shape"] - confidence_array = valid_bboxes_array["confidence"] + position_array = valid_bboxes_arrays["position"] + shape_array = valid_bboxes_arrays["shape"] + confidence_array = valid_bboxes_arrays["confidence"] n_frames, n_individuals, _ = position_array.shape @@ -409,12 +410,133 @@ def valid_poses_dataset(valid_position_array, request): @pytest.fixture def valid_poses_dataset_with_nan(valid_poses_dataset): """Return a valid pose tracks dataset with NaN values.""" + # Sets position for all keypoints in individual ind1 to NaN + # at timepoints 3, 7, 8 valid_poses_dataset.position.loc[ {"individuals": "ind1", "time": [3, 7, 8]} ] = np.nan return valid_poses_dataset +@pytest.fixture +def valid_poses_array_uniform_linear_motion(): + # define the shape of the arrays + n_frames, n_individuals, n_space = (10, 2, 2) + n_keypoints = 3 + kpt_str2idx = { + "centroid": 0, + "left": 1, + "right": 2, + } + + # define centroid trajectory in position array + # for each individual, the centroid moves along + # the x=+/-y line, starting from the origin. + # - if the index of the individual is even: along x = y line + # - if the index of the individual odd: along x = -y line + # They move one unit along x and y axes in each frame + position = np.empty((n_frames, n_individuals, n_keypoints, n_space)) + for i in range(n_individuals): + position[:, i, kpt_str2idx["centroid"], 0] = np.arange(n_frames) + position[:, i, kpt_str2idx["centroid"], 1] = (-1) ** i * np.arange( + n_frames + ) + + # define trajectory of left and right keypoints + # for individual 0, at each timepoint: + # - the left keypoint (index=1) is at x_centroid, y_centroid + 1 + # - the right keypoint (index=2) is at x_centroid + 1, y_centroid + # for individual 1, at each timepoint: + # - the left keypoint (index=1) is at x_centroid - 1, y_centroid + # - the right keypoint (index=2) is at x_centroid, y_centroid + 1 + offset_in_x = {"x_offset": 1, "y_offset": 0} + offset_in_y = {"x_offset": 0, "y_offset": 1} + + individual_to_side_kpt_offset = { + 0: { + "left": offset_in_y, + "right": offset_in_x, + }, + 1: { + "left": {k: -v for k, v in offset_in_x.items()}, + "right": offset_in_y, + }, + } + + # fill in left and right keypoints in position array + for i in individual_to_side_kpt_offset: + side_kpt_offset = individual_to_side_kpt_offset[i] + for kpt in kpt_str2idx: + if kpt != "centroid": + # x coord + position[:, i, kpt_str2idx[kpt], 0] = ( + position[:, i, 0, 0] + side_kpt_offset[kpt]["x_offset"] + ) + # y coord + position[:, i, kpt_str2idx[kpt], 1] = ( + position[:, i, 0, 1] + side_kpt_offset[kpt]["y_offset"] + ) + + # build an array of confidence values, all 0.9 + confidence = np.full((n_frames, n_individuals, n_keypoints), 0.9) + + # set 5 low-confidence values + # - set 3 confidence values for individual id_0's centroid to 0.1 + # - set 2 confidence values for individual id_1's centroid to 0.1 + idx_start = 2 + confidence[idx_start : idx_start + 3, 0, 0] = 0.1 + confidence[idx_start : idx_start + 2, 1, 0] = 0.1 + + # ensure list of kpt names is sorted by index before returning + # to match arrays + kpt_str2idx_sorted = dict( + sorted( + kpt_str2idx.items(), + key=lambda item: item[1], + ) + ) + + return { + "position": position, + "confidence": confidence, + "keypoint_names": list(kpt_str2idx_sorted.keys()), + } + + +@pytest.fixture +def valid_poses_dataset_uniform_linear_motion( + 
valid_poses_array_uniform_linear_motion,
+):
+    """Return a valid poses dataset for a uniform linear motion."""
+    dim_names = MovementDataset.dim_names["poses"]
+
+    position_array = valid_poses_array_uniform_linear_motion["position"]
+    confidence_array = valid_poses_array_uniform_linear_motion["confidence"]
+    keypoint_names = valid_poses_array_uniform_linear_motion["keypoint_names"]
+
+    n_frames, n_individuals, _, _ = position_array.shape
+
+    return xr.Dataset(
+        data_vars={
+            "position": xr.DataArray(position_array, dims=dim_names),
+            "confidence": xr.DataArray(confidence_array, dims=dim_names[:-1]),
+        },
+        coords={
+            dim_names[0]: np.arange(n_frames),
+            dim_names[1]: [f"id_{i}" for i in range(n_individuals)],
+            dim_names[2]: keypoint_names,
+            dim_names[3]: ["x", "y"],
+        },
+        attrs={
+            "fps": None,
+            "time_unit": "frames",
+            "source_software": "test",
+            "source_file": "test_poses.h5",
+            "ds_type": "poses",
+        },
+    )
+
+
 # -------------------- Invalid datasets fixtures ------------------------------
 @pytest.fixture
 def not_a_dataset():
diff --git a/tests/test_unit/test_kinematics.py b/tests/test_unit/test_kinematics.py
index 1f75a824..6e94d7f7 100644
--- a/tests/test_unit/test_kinematics.py
+++ b/tests/test_unit/test_kinematics.py
@@ -1,112 +1,176 @@
-from contextlib import nullcontext as does_not_raise
-
 import numpy as np
 import pytest
-import xarray as xr
 
 from movement.analysis import kinematics
 
 
-class TestKinematics:
-    """Test suite for the kinematics module."""
+@pytest.mark.parametrize(
+    "valid_dataset_uniform_linear_motion",
+    [
+        "valid_poses_dataset_uniform_linear_motion",
+        "valid_bboxes_dataset",
+    ],
+)
+@pytest.mark.parametrize(
+    "kinematic_variable, expected_2D_array_per_individual_and_kpt",
+    [
+        (
+            "displacement",
+            {
+                0: np.vstack(
+                    [np.zeros((1, 2)), np.ones((9, 2))]
+                ),  # at t=0 displacement is (0,0)
+                1: np.multiply(
+                    np.vstack([np.zeros((1, 2)), np.ones((9, 2))]),
+                    np.array([1, -1]),
+                ),
+            },
+        ),
+        (
+            "velocity",
+            {
+                0: np.ones((10, 2)),
+                1: np.multiply(np.ones((10, 2)), np.array([1, -1])),
+            },
+        ),
+        (
+            "acceleration",
+            {
+                0: np.zeros((10, 2)),
+                1: np.zeros((10, 2)),
+            },
+        ),
+    ],
+)
+def test_kinematics_uniform_linear_motion(
+    valid_dataset_uniform_linear_motion,
+    kinematic_variable,
+    expected_2D_array_per_individual_and_kpt,  # 2D: n_frames, n_space_dims
+    request,
+):
+    """Test computed kinematics for a uniform linear motion case.
 
-    @pytest.fixture
-    def expected_dataarray(self, valid_poses_dataset):
-        """Return a function to generate the expected dataarray
-        for different kinematic properties.
-        """
+    Uniform linear motion means the individuals move along a line
+    at constant velocity.
 
-        def _expected_dataarray(property):
-            """Return an xarray.DataArray with default values and
-            the expected dimensions and coordinates.
- """ - # Expected x,y values for velocity - x_vals = np.array( - [1.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 17.0] - ) - y_vals = np.full((10, 2, 2, 1), 4.0) - if property == "acceleration": - x_vals = np.array( - [1.0, 1.5, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 1.5, 1.0] - ) - y_vals = np.full((10, 2, 2, 1), 0) - elif property == "displacement": - x_vals = np.array( - [0.0, 1.0, 3.0, 5.0, 7.0, 9.0, 11.0, 13.0, 15.0, 17.0] + We consider 2 individuals ("id_0" and "id_1"), + tracked for 10 frames, along x and y: + - id_0 moves along x=y line from the origin + - id_1 moves along x=-y line from the origin + - they both move one unit (pixel) along each axis in each frame + + If the dataset is a poses dataset, we consider 3 keypoints per individual + (centroid, left, right), that are always in front of the centroid keypoint + at 45deg from the trajectory. + """ + position = request.getfixturevalue( + valid_dataset_uniform_linear_motion + ).position + kinematic_variable = getattr(kinematics, f"compute_{kinematic_variable}")( + position + ) + + for ind in expected_2D_array_per_individual_and_kpt: + if "keypoints" in position.coords: + for k in range(position.coords["keypoints"].size): + assert np.allclose( + kinematic_variable.isel( + individuals=ind, keypoints=k + ).values, + expected_2D_array_per_individual_and_kpt[ind], ) - y_vals[0] = 0 - - x_vals = x_vals.reshape(-1, 1, 1, 1) - # Repeat the x_vals to match the shape of the position - x_vals = np.tile(x_vals, (1, 2, 2, 1)) - return xr.DataArray( - np.concatenate( - [x_vals, y_vals], - axis=-1, - ), - dims=valid_poses_dataset.dims, - coords=valid_poses_dataset.coords, + else: + assert np.allclose( + kinematic_variable.isel(individuals=ind).values, + expected_2D_array_per_individual_and_kpt[ind], ) - return _expected_dataarray - kinematic_test_params = [ - ("valid_poses_dataset", does_not_raise()), - ("valid_poses_dataset_with_nan", does_not_raise()), - ("missing_dim_poses_dataset", pytest.raises(ValueError)), - ] - - @pytest.mark.parametrize("ds, expected_exception", kinematic_test_params) - def test_displacement( - self, ds, expected_exception, expected_dataarray, request - ): - """Test displacement computation.""" - ds = request.getfixturevalue(ds) - with expected_exception: - result = kinematics.compute_displacement(ds.position) - expected = expected_dataarray("displacement") - if ds.position.isnull().any(): - expected.loc[ - {"individuals": "ind1", "time": [3, 4, 7, 8, 9]} - ] = np.nan - xr.testing.assert_allclose(result, expected) - - @pytest.mark.parametrize("ds, expected_exception", kinematic_test_params) - def test_velocity( - self, ds, expected_exception, expected_dataarray, request - ): - """Test velocity computation.""" - ds = request.getfixturevalue(ds) - with expected_exception: - result = kinematics.compute_velocity(ds.position) - expected = expected_dataarray("velocity") - if ds.position.isnull().any(): - expected.loc[ - {"individuals": "ind1", "time": [2, 4, 6, 7, 8, 9]} - ] = np.nan - xr.testing.assert_allclose(result, expected) - - @pytest.mark.parametrize("ds, expected_exception", kinematic_test_params) - def test_acceleration( - self, ds, expected_exception, expected_dataarray, request - ): - """Test acceleration computation.""" - ds = request.getfixturevalue(ds) - with expected_exception: - result = kinematics.compute_acceleration(ds.position) - expected = expected_dataarray("acceleration") - if ds.position.isnull().any(): - expected.loc[ - {"individuals": "ind1", "time": [1, 3, 5, 6, 7, 8, 9]} - ] = np.nan - 
xr.testing.assert_allclose(result, expected) - - @pytest.mark.parametrize("order", [0, -1, 1.0, "1"]) - def test_approximate_derivative_with_invalid_order(self, order): - """Test that an error is raised when the order is non-positive.""" - data = np.arange(10) - expected_exception = ( - ValueError if isinstance(order, int) else TypeError +@pytest.mark.parametrize( + "valid_dataset_with_nan", + [ + "valid_poses_dataset_with_nan", + "valid_bboxes_dataset_with_nan", + ], +) +@pytest.mark.parametrize( + "kinematic_variable, expected_nans_per_individual", + [ + ("displacement", {0: 5, 1: 0}), + ("velocity", {0: 6, 1: 0}), + ("acceleration", {0: 7, 1: 0}), + ], +) +def test_kinematics_with_dataset_with_nans( + valid_dataset_with_nan, + kinematic_variable, + expected_nans_per_individual, + helpers, + request, +): + """Test kinematics computation for a dataset with nans. + + We test that the kinematics can be computed and that the number + of nan values in the kinematic array is as expected. + + """ + # compute kinematic array + valid_dataset = request.getfixturevalue(valid_dataset_with_nan) + position = valid_dataset.position + kinematic_array = getattr(kinematics, f"compute_{kinematic_variable}")( + position + ) + + # compute n nans in kinematic array per individual + n_nans_kinematics_per_indiv = { + i: helpers.count_nans(kinematic_array.isel(individuals=i)) + for i in range(valid_dataset.dims["individuals"]) + } + + # check number of nans per indiv is as expected in kinematic array + for i in range(valid_dataset.dims["individuals"]): + assert n_nans_kinematics_per_indiv[i] == ( + expected_nans_per_individual[i] + * valid_dataset.dims["space"] + * valid_dataset.dims.get("keypoints", 1) ) - with pytest.raises(expected_exception): - kinematics._compute_approximate_time_derivative(data, order=order) + + +@pytest.mark.parametrize( + "invalid_dataset, expected_exception", + [ + ("not_a_dataset", pytest.raises(AttributeError)), + ("empty_dataset", pytest.raises(AttributeError)), + ("missing_var_poses_dataset", pytest.raises(AttributeError)), + ("missing_var_bboxes_dataset", pytest.raises(AttributeError)), + ("missing_dim_poses_dataset", pytest.raises(ValueError)), + ("missing_dim_bboxes_dataset", pytest.raises(ValueError)), + ], +) +@pytest.mark.parametrize( + "kinematic_variable", + [ + "displacement", + "velocity", + "acceleration", + ], +) +def test_kinematics_with_invalid_dataset( + invalid_dataset, + expected_exception, + kinematic_variable, + request, +): + """Test kinematics computation with an invalid dataset.""" + with expected_exception: + position = request.getfixturevalue(invalid_dataset).position + getattr(kinematics, f"compute_{kinematic_variable}")(position) + + +@pytest.mark.parametrize("order", [0, -1, 1.0, "1"]) +def test_approximate_derivative_with_invalid_order(order): + """Test that an error is raised when the order is non-positive.""" + data = np.arange(10) + expected_exception = ValueError if isinstance(order, int) else TypeError + with pytest.raises(expected_exception): + kinematics._compute_approximate_time_derivative(data, order=order) From 67722f75a72cff1397f6c4b5bface2d1d59398c0 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 19:50:50 +0100 Subject: [PATCH 03/14] Adapt integration tests for kinematics+polar --- .../test_kinematics_vector_transform.py | 118 ++++++++++++++---- 1 file changed, 95 insertions(+), 23 deletions(-) diff --git a/tests/test_integration/test_kinematics_vector_transform.py 
b/tests/test_integration/test_kinematics_vector_transform.py index 65318a08..ca56b94e 100644 --- a/tests/test_integration/test_kinematics_vector_transform.py +++ b/tests/test_integration/test_kinematics_vector_transform.py @@ -1,33 +1,105 @@ -from contextlib import nullcontext as does_not_raise +import math +import numpy as np import pytest import xarray as xr from movement.utils import vector -class TestKinematicsVectorTransform: - """Test the vector transformation functionality with - various kinematic properties. +@pytest.mark.parametrize( + "valid_dataset_uniform_linear_motion", + [ + "valid_poses_dataset_uniform_linear_motion", + "valid_bboxes_dataset", + ], +) +@pytest.mark.parametrize( + "kinematic_variable, expected_2D_pol_array_per_individual", + [ + ( + "displacement", + { + 0: np.vstack( + [ + np.zeros((1, 2)), + np.tile( + np.array( + [math.sqrt(2), math.atan(1)] + ), # rho, phi=45deg + (9, 1), + ), + ] + ), + 1: np.vstack( + [ + np.zeros((1, 2)), + np.tile( + np.array( + [math.sqrt(2), -math.atan(1)] + ), # rho, phi=-45deg + (9, 1), + ), + ] + ), + }, + ), + ( + "velocity", + { + 0: np.tile( + np.array([math.sqrt(2), math.atan(1)]), # rho, phi=-45deg + (10, 1), + ), + 1: np.tile( + np.array( + [math.sqrt(2), -math.atan(1)] + ), # rho, phi=-135deg + (10, 1), + ), + }, + ), + ( + "acceleration", + { + 0: np.zeros((10, 2)), + 1: np.zeros((10, 2)), + }, + ), + ], +) +def test_cart2pol_transform_on_kinematics( + valid_dataset_uniform_linear_motion, + kinematic_variable, + expected_2D_pol_array_per_individual, + request, +): + """Test transformation between Cartesian and polar coordinates + with various kinematic properties. """ + ds = request.getfixturevalue(valid_dataset_uniform_linear_motion) + kinematic_array_cart = getattr(ds.move, f"compute_{kinematic_variable}")() - @pytest.mark.parametrize( - "ds, expected_exception", - [ - ("valid_poses_dataset", does_not_raise()), - ("valid_poses_dataset_with_nan", does_not_raise()), - ("missing_dim_poses_dataset", pytest.raises(RuntimeError)), - ], + kinematic_array_pol = vector.cart2pol(kinematic_array_cart) + + # Check the polar array is as expected + for ind in expected_2D_pol_array_per_individual: + if "keypoints" in ds.position.coords: + for k in range(ds.position.coords["keypoints"].size): + assert np.allclose( + kinematic_array_pol.isel( + individuals=ind, keypoints=k + ).values, + expected_2D_pol_array_per_individual[ind], + ) + else: + assert np.allclose( + kinematic_array_pol.isel(individuals=ind).values, + expected_2D_pol_array_per_individual[ind], + ) + + # Check we can recover the original Cartesian array? + kinematic_array_cart_recover = vector.pol2cart(kinematic_array_pol) + xr.testing.assert_allclose( + kinematic_array_cart, kinematic_array_cart_recover ) - def test_cart_and_pol_transform( - self, ds, expected_exception, kinematic_property, request - ): - """Test transformation between Cartesian and polar coordinates - with various kinematic properties. 
- """ - ds = request.getfixturevalue(ds) - with expected_exception: - data = getattr(ds.move, f"compute_{kinematic_property}")() - pol_data = vector.cart2pol(data) - cart_data = vector.pol2cart(pol_data) - xr.testing.assert_allclose(cart_data, data) From de4b839d368486d530d5a1dcbd512ce9d7e6bbd4 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 20:02:44 +0100 Subject: [PATCH 04/14] Update integration tests for filtering --- tests/test_integration/test_filtering.py | 63 ++++++++++-------------- 1 file changed, 25 insertions(+), 38 deletions(-) diff --git a/tests/test_integration/test_filtering.py b/tests/test_integration/test_filtering.py index 90efce6c..072f7254 100644 --- a/tests/test_integration/test_filtering.py +++ b/tests/test_integration/test_filtering.py @@ -16,63 +16,50 @@ def sample_dataset(): "poses" ] ds = load_poses.from_dlc_file(ds_path) - ds["velocity"] = ds.move.compute_velocity() + return ds @pytest.mark.parametrize("window", [3, 5, 6, 13]) def test_nan_propagation_through_filters(sample_dataset, window, helpers): - """Test NaN propagation when passing a DataArray through - multiple filters sequentially. For the ``median_filter`` - and ``savgol_filter``, the number of NaNs is expected to increase + """Test NaN propagation is as expected when passing a DataArray through + filter by confidence, Savgol filter and interpolation. + For the ``savgol_filter``, the number of NaNs is expected to increase at most by the filter's window length minus one (``window - 1``) multiplied by the number of consecutive NaNs in the input data. """ - # Introduce nans via filter_by_confidence + # Check filter position by confidence behaves as expected + # default threshold is 0.6 for confidence + expected_n_nans = (sample_dataset.confidence.data < 0.6).sum() # 13136 sample_dataset.update( {"position": sample_dataset.move.filter_by_confidence()} ) - expected_n_nans = 13136 - n_nans_confilt = helpers.count_nans(sample_dataset.position) - assert n_nans_confilt == expected_n_nans, ( - f"Expected {expected_n_nans} NaNs in filtered data, " - f"got: {n_nans_confilt}" - ) - n_consecutive_nans = helpers.count_consecutive_nans( - sample_dataset.position - ) - # Apply median filter and check that - # it doesn't introduce too many or too few NaNs - sample_dataset.update( - {"position": sample_dataset.move.median_filter(window)} + n_total_nans_input = helpers.count_nans(sample_dataset.position) + + assert n_total_nans_input == expected_n_nans, ( + f"Expected {expected_n_nans} NaNs in input data, " + f"got: {n_total_nans_input}" ) - n_nans_medfilt = helpers.count_nans(sample_dataset.position) - max_nans_increase = (window - 1) * n_consecutive_nans - assert ( - n_nans_medfilt <= n_nans_confilt + max_nans_increase - ), "Median filter introduced more NaNs than expected." - assert ( - n_nans_medfilt >= n_nans_confilt - ), "Median filter mysteriously removed NaNs." 
- n_consecutive_nans = helpers.count_consecutive_nans( + + # Compute maximum expected increase in NaNs due to filtering + n_consecutive_nans_input = helpers.count_consecutive_nans( sample_dataset.position ) + max_nans_increase = (window - 1) * n_consecutive_nans_input - # Apply savgol filter and check that - # it doesn't introduce too many or too few NaNs + # Apply savgol filter and check that number of NaNs is within threshold sample_dataset.update( {"position": sample_dataset.move.savgol_filter(window, polyorder=2)} ) - n_nans_savgol = helpers.count_nans(sample_dataset.position) - max_nans_increase = (window - 1) * n_consecutive_nans - assert ( - n_nans_savgol <= n_nans_medfilt + max_nans_increase - ), "Savgol filter introduced more NaNs than expected." - assert ( - n_nans_savgol >= n_nans_medfilt - ), "Savgol filter mysteriously removed NaNs." - # Interpolate data (without max_gap) to eliminate all NaNs + n_total_nans_savgol = helpers.count_nans(sample_dataset.position) + + # Check that filtering does not reduce number of nans + assert n_total_nans_savgol >= n_total_nans_input + # Check that the increase in nans is below the expected threshold + assert n_total_nans_savgol - n_total_nans_input <= max_nans_increase + + # Interpolate data (without max_gap) and check it eliminates all NaNs sample_dataset.update( {"position": sample_dataset.move.interpolate_over_time()} ) From a71a132b41320dc91b4dfb3af130fd82f501f81a Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 20:14:06 +0100 Subject: [PATCH 05/14] Fix factor 2 difference --- tests/test_integration/test_filtering.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/tests/test_integration/test_filtering.py b/tests/test_integration/test_filtering.py index 072f7254..cba430f0 100644 --- a/tests/test_integration/test_filtering.py +++ b/tests/test_integration/test_filtering.py @@ -9,9 +9,7 @@ @pytest.fixture def sample_dataset(): - """Return a single-animal sample dataset, with time unit in frames. - This allows us to better control the expected number of NaNs in the tests. - """ + """Return a single-animal sample dataset, with time unit in frames.""" ds_path = fetch_dataset_paths("DLC_single-mouse_EPM.predictions.h5")[ "poses" ] @@ -28,17 +26,18 @@ def test_nan_propagation_through_filters(sample_dataset, window, helpers): at most by the filter's window length minus one (``window - 1``) multiplied by the number of consecutive NaNs in the input data. """ - # Check filter position by confidence behaves as expected - # default threshold is 0.6 for confidence - expected_n_nans = (sample_dataset.confidence.data < 0.6).sum() # 13136 + # Compute number of low confidence keypoints + n_low_confidence_kpts = (sample_dataset.confidence.data < 0.6).sum() + + # Check filter position by confidence creates correct number of NaNs sample_dataset.update( {"position": sample_dataset.move.filter_by_confidence()} ) n_total_nans_input = helpers.count_nans(sample_dataset.position) - assert n_total_nans_input == expected_n_nans, ( - f"Expected {expected_n_nans} NaNs in input data, " - f"got: {n_total_nans_input}" + assert ( + n_total_nans_input + == n_low_confidence_kpts * sample_dataset.dims["space"] ) # Compute maximum expected increase in NaNs due to filtering @@ -92,6 +91,9 @@ def test_accessor_filter_method( applied, if valid data variables are passed, otherwise raise an exception. 
""" + # Compute velocity + sample_dataset["velocity"] = sample_dataset.move.compute_velocity() + with expected_exception as expected_type: if method in ["median_filter", "savgol_filter"]: # supply required "window" argument From 1be0dd320d6eeab9e521023509b8b0e9640b5a2b Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Fri, 30 Aug 2024 20:16:24 +0100 Subject: [PATCH 06/14] Update conftest --- tests/conftest.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e2e27652..a3040299 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -447,19 +447,19 @@ def valid_poses_array_uniform_linear_motion(): # - the left keypoint (index=1) is at x_centroid, y_centroid + 1 # - the right keypoint (index=2) is at x_centroid + 1, y_centroid # for individual 1, at each timepoint: - # - the left keypoint (index=1) is at x_centroid - 1, y_centroid - # - the right keypoint (index=2) is at x_centroid, y_centroid + 1 - offset_in_x = {"x_offset": 1, "y_offset": 0} - offset_in_y = {"x_offset": 0, "y_offset": 1} + # - the left keypoint (index=1) is at x_centroid + 1, y_centroid + # - the right keypoint (index=2) is at x_centroid, y_centroid - 1 + offset_in_xpos = {"x_offset": 1, "y_offset": 0} + offset_in_ypos = {"x_offset": 0, "y_offset": 1} individual_to_side_kpt_offset = { 0: { - "left": offset_in_y, - "right": offset_in_x, + "left": offset_in_ypos, + "right": offset_in_xpos, }, 1: { - "left": {k: -v for k, v in offset_in_x.items()}, - "right": offset_in_y, + "left": offset_in_xpos, + "right": {k: -v for k, v in offset_in_ypos.items()}, }, } From 93e0374a6e0909f3de2f6516cb341114634f2b15 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Mon, 16 Sep 2024 18:19:29 +0100 Subject: [PATCH 07/14] Remove redundant comment in conftest --- tests/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a32ed078..272e5eaa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -237,7 +237,6 @@ def valid_bboxes_arrays_all_zeros(): # --------------------- Bboxes dataset fixtures ---------------------------- -# uniform linear motion @pytest.fixture def valid_bboxes_arrays(): """Return a dictionary of valid arrays for a @@ -286,7 +285,6 @@ def valid_bboxes_arrays(): } -# uniform linear motion @pytest.fixture def valid_bboxes_dataset( valid_bboxes_arrays, From 6ba7271d2fe7e827493e17bd0876741e4d4c5500 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Mon, 16 Sep 2024 18:44:49 +0100 Subject: [PATCH 08/14] Apply feedback from kinematic tests --- .../test_kinematics_vector_transform.py | 92 ++++++++----------- 1 file changed, 40 insertions(+), 52 deletions(-) diff --git a/tests/test_integration/test_kinematics_vector_transform.py b/tests/test_integration/test_kinematics_vector_transform.py index ca56b94e..b33c2bfc 100644 --- a/tests/test_integration/test_kinematics_vector_transform.py +++ b/tests/test_integration/test_kinematics_vector_transform.py @@ -19,52 +19,38 @@ [ ( "displacement", - { - 0: np.vstack( + [ + np.vstack( [ np.zeros((1, 2)), - np.tile( - np.array( - [math.sqrt(2), math.atan(1)] - ), # rho, phi=45deg - (9, 1), - ), - ] - ), - 1: np.vstack( + np.tile([math.sqrt(2), math.atan(1)], (9, 1)), + ], + ), # Individual 0, rho=sqrt(2), phi=45deg + np.vstack( [ np.zeros((1, 2)), - np.tile( - np.array( - [math.sqrt(2), -math.atan(1)] - ), # rho, phi=-45deg - (9, 1), - ), + 
np.tile([math.sqrt(2), -math.atan(1)], (9, 1)),
                     ]
-                ),
-            },
+                ),  # Individual 1, rho=sqrt(2), phi=-45deg
+            ],
         ),
         (
             "velocity",
-            {
-                0: np.tile(
-                    np.array([math.sqrt(2), math.atan(1)]),  # rho, phi=-45deg
-                    (10, 1),
-                ),
-                1: np.tile(
-                    np.array(
-                        [math.sqrt(2), -math.atan(1)]
-                    ),  # rho, phi=-135deg
-                    (10, 1),
-                ),
-            },
+            [
+                np.tile(
+                    [math.sqrt(2), math.atan(1)], (10, 1)
+                ),  # Individual 0, rho, phi=45deg
+                np.tile(
+                    [math.sqrt(2), -math.atan(1)], (10, 1)
+                ),  # Individual 1, rho, phi=-45deg
+            ],
         ),
         (
             "acceleration",
-            {
-                0: np.zeros((10, 2)),
-                1: np.zeros((10, 2)),
-            },
+            [
+                np.zeros((10, 2)),
+                np.zeros((10, 2)),
+            ],
         ),
     ],
 )
 def test_cart2pol_transform_on_kinematics(
     valid_dataset_uniform_linear_motion,
     kinematic_variable,
     expected_2D_pol_array_per_individual,
     request,
 ):
     """Test transformation between Cartesian and polar coordinates
     with various kinematic properties.
     """
     ds = request.getfixturevalue(valid_dataset_uniform_linear_motion)
     kinematic_array_cart = getattr(ds.move, f"compute_{kinematic_variable}")()
-
     kinematic_array_pol = vector.cart2pol(kinematic_array_cart)
 
-    # Check the polar array is as expected
-    for ind in expected_2D_pol_array_per_individual:
-        if "keypoints" in ds.position.coords:
-            for k in range(ds.position.coords["keypoints"].size):
-                assert np.allclose(
-                    kinematic_array_pol.isel(
-                        individuals=ind, keypoints=k
-                    ).values,
-                    expected_2D_pol_array_per_individual[ind],
-                )
-        else:
-            assert np.allclose(
-                kinematic_array_pol.isel(individuals=ind).values,
-                expected_2D_pol_array_per_individual[ind],
-            )
+    # Build expected data array
+    expected_array_pol = xr.DataArray(
+        np.stack(expected_2D_pol_array_per_individual, axis=1),
+        # Stack along the "individuals" axis
+        dims=["time", "individuals", "space"],
+    )
+    if "keypoints" in ds.position.coords:
+        expected_array_pol = expected_array_pol.expand_dims(
+            {"keypoints": ds.position.coords["keypoints"].size}
+        )
+        expected_array_pol = expected_array_pol.transpose(
+            "time", "individuals", "keypoints", "space"
+        )
+
+    # Compare the values of the kinematic_array against the expected_array
+    np.testing.assert_allclose(
+        kinematic_array_pol.values, expected_array_pol.values
+    )
 
-    # Check we can recover the original Cartesian array?
+ # Check we can recover the original Cartesian array kinematic_array_cart_recover = vector.pol2cart(kinematic_array_pol) xr.testing.assert_allclose( kinematic_array_cart, kinematic_array_cart_recover From d96f465f7e9784b03f526f194bc00b7632321259 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Mon, 16 Sep 2024 18:46:39 +0100 Subject: [PATCH 09/14] Cosmetic changes --- .../test_kinematics_vector_transform.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_integration/test_kinematics_vector_transform.py b/tests/test_integration/test_kinematics_vector_transform.py index b33c2bfc..63ecc2e4 100644 --- a/tests/test_integration/test_kinematics_vector_transform.py +++ b/tests/test_integration/test_kinematics_vector_transform.py @@ -15,7 +15,7 @@ ], ) @pytest.mark.parametrize( - "kinematic_variable, expected_2D_pol_array_per_individual", + "kinematic_variable, expected_kinematics_polar", [ ( "displacement", @@ -48,8 +48,8 @@ ( "acceleration", [ - np.zeros((10, 2)), - np.zeros((10, 2)), + np.zeros((10, 2)), # Individual 0 + np.zeros((10, 2)), # Individual 1 ], ), ], @@ -57,7 +57,7 @@ def test_cart2pol_transform_on_kinematics( valid_dataset_uniform_linear_motion, kinematic_variable, - expected_2D_pol_array_per_individual, + expected_kinematics_polar, request, ): """Test transformation between Cartesian and polar coordinates @@ -69,7 +69,7 @@ def test_cart2pol_transform_on_kinematics( # Build expected data array expected_array_pol = xr.DataArray( - np.stack(expected_2D_pol_array_per_individual, axis=1), + np.stack(expected_kinematics_polar, axis=1), # Stack along the "individuals" axis dims=["time", "individuals", "space"], ) From 1e03f0f16228464d8c9a2ae33135320cec07c4c3 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Thu, 17 Oct 2024 11:59:05 +0200 Subject: [PATCH 10/14] Spoof user-agent to avoid 403 error --- docs/source/conf.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index 9b051fb0..acbd1226 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -175,6 +175,14 @@ linkcheck_ignore = [ "https://pubs.acs.org/doi/*", # Checking dois is forbidden here ] +# Spoof user agent to avoid 403 errors, see +# https://github.com/sphinx-doc/sphinx/issues/10343#issuecomment-1097430133 +linkcheck_request_headers = { + r"https://opensource.org/license/bsd-3-clause/": { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) " + "Gecko/20100101 Firefox/92.0" + } +} myst_url_schemes = { "http": None, From 050859003ee7627f9ac34cb6aab490892a905139 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:16:56 +0200 Subject: [PATCH 11/14] Check different URL --- docs/source/community/license.md | 2 +- docs/source/conf.py | 8 -------- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/docs/source/community/license.md b/docs/source/community/license.md index 9f088494..cd32ea81 100644 --- a/docs/source/community/license.md +++ b/docs/source/community/license.md @@ -1,6 +1,6 @@ # License -[The 3-Clause BSD License](https://opensource.org/license/bsd-3-clause/) +[The 3-Clause BSD License](https://opensource.org/licenses/BSD-3-Clause) ```{include} ../../../LICENSE ``` diff --git a/docs/source/conf.py b/docs/source/conf.py index acbd1226..9b051fb0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -175,14 +175,6 @@ linkcheck_ignore = [ "https://pubs.acs.org/doi/*", # 
Checking dois is forbidden here ] -# Spoof user agent to avoid 403 errors, see -# https://github.com/sphinx-doc/sphinx/issues/10343#issuecomment-1097430133 -linkcheck_request_headers = { - r"https://opensource.org/license/bsd-3-clause/": { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) " - "Gecko/20100101 Firefox/92.0" - } -} myst_url_schemes = { "http": None, From 01df14e7f31ff24f19e8279319af97ed5debe993 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:21:50 +0200 Subject: [PATCH 12/14] Ignore link to license temporarily --- docs/source/community/license.md | 2 +- docs/source/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/source/community/license.md b/docs/source/community/license.md index cd32ea81..9f088494 100644 --- a/docs/source/community/license.md +++ b/docs/source/community/license.md @@ -1,6 +1,6 @@ # License -[The 3-Clause BSD License](https://opensource.org/licenses/BSD-3-Clause) +[The 3-Clause BSD License](https://opensource.org/license/bsd-3-clause/) ```{include} ../../../LICENSE ``` diff --git a/docs/source/conf.py b/docs/source/conf.py index 9b051fb0..fda3e86f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -174,6 +174,7 @@ # A list of regular expressions that match URIs that should not be checked linkcheck_ignore = [ "https://pubs.acs.org/doi/*", # Checking dois is forbidden here + "https://opensource.org/license/bsd-3-clause/", # to avoid odd 403 error ] myst_url_schemes = { From d67de0edc0bb0baea7a2d6fc23bf94237988a5df Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:41:22 +0200 Subject: [PATCH 13/14] Try fake-useragent --- docs/requirements.txt | 1 + docs/source/conf.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 63615d66..5d556129 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,5 @@ -e . +fake-useragent linkify-it-py myst-parser nbsphinx diff --git a/docs/source/conf.py b/docs/source/conf.py index fda3e86f..717270de 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -10,6 +10,7 @@ import sys import setuptools_scm +from fake_useragent import UserAgent # Used when building API docs, put the dependencies # of any class you are documenting here @@ -174,9 +175,19 @@ # A list of regular expressions that match URIs that should not be checked linkcheck_ignore = [ "https://pubs.acs.org/doi/*", # Checking dois is forbidden here - "https://opensource.org/license/bsd-3-clause/", # to avoid odd 403 error + # "https://opensource.org/license/bsd-3-clause/", # to avoid 403 error ] +# Generate a random User-Agent string +ua = UserAgent() +# Spoof user agent to avoid 403 errors, see +# https://github.com/sphinx-doc/sphinx/issues/10343#issuecomment-1097430133 +linkcheck_request_headers = { + "https://opensource.org/license/bsd-3-clause/": { + "User-Agent": ua.random + } +} + myst_url_schemes = { "http": None, "https": None, From e947f6ce8dd9e5523ddc2713410f12b37605a1f4 Mon Sep 17 00:00:00 2001 From: sfmig <33267254+sfmig@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:31:21 +0200 Subject: [PATCH 14/14] Revert "Try fake-useragent" This reverts commit d67de0edc0bb0baea7a2d6fc23bf94237988a5df. 
--- docs/requirements.txt | 1 - docs/source/conf.py | 13 +------------ 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 5d556129..63615d66 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,4 @@ -e . -fake-useragent linkify-it-py myst-parser nbsphinx diff --git a/docs/source/conf.py b/docs/source/conf.py index 717270de..fda3e86f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -10,7 +10,6 @@ import sys import setuptools_scm -from fake_useragent import UserAgent # Used when building API docs, put the dependencies # of any class you are documenting here @@ -175,19 +174,9 @@ # A list of regular expressions that match URIs that should not be checked linkcheck_ignore = [ "https://pubs.acs.org/doi/*", # Checking dois is forbidden here - # "https://opensource.org/license/bsd-3-clause/", # to avoid 403 error + "https://opensource.org/license/bsd-3-clause/", # to avoid odd 403 error ] -# Generate a random User-Agent string -ua = UserAgent() -# Spoof user agent to avoid 403 errors, see -# https://github.com/sphinx-doc/sphinx/issues/10343#issuecomment-1097430133 -linkcheck_request_headers = { - "https://opensource.org/license/bsd-3-clause/": { - "User-Agent": ua.random - } -} - myst_url_schemes = { "http": None, "https": None,
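
Note: the NaN-propagation bound asserted in patches 01 and 04, and the polar
values expected in patches 03 and 08, can be sanity-checked with plain NumPy.
The sketch below is a minimal stand-in, not movement's implementation:
``sliding_median`` is a hypothetical substitute for movement's median filter,
and ``n_nan_runs`` is assumed to mirror what the ``helpers.count_consecutive_nans``
test helper counts (onsets of consecutive-NaN runs).

import numpy as np

def sliding_median(x, window):
    # np.median returns NaN for any window that contains a NaN, so each
    # run of consecutive NaNs can grow by at most (window - 1) elements.
    pad = window // 2
    xp = np.pad(x, pad, mode="edge")  # edge-pad so no NaNs are added at the borders
    windows = np.lib.stride_tricks.sliding_window_view(xp, window)
    return np.median(windows, axis=-1)

window = 3
x = np.array([0.0, 1.0, 2.0, np.nan, 4.0, 5.0, np.nan, np.nan, 8.0, 9.0])
n_nans_in = int(np.isnan(x).sum())  # 3 NaNs, spread over 2 consecutive runs
n_nan_runs = int((np.diff(np.isnan(x).astype(int)) == 1).sum())  # run onsets; x[0] is finite

y = sliding_median(x, window)
n_nans_out = int(np.isnan(y).sum())  # 7: each run smears across every overlapping window

assert n_nans_out >= n_nans_in  # filtering never removes NaNs
assert n_nans_out - n_nans_in <= (window - 1) * n_nan_runs  # the bound the tests assert

# Polar expectations: per-frame displacement of individual 0 is (1, 1), so
# rho = sqrt(1**2 + 1**2) = sqrt(2) and phi = arctan2(1, 1) = pi/4 (45deg);
# individual 1 moves along (1, -1), giving phi = -pi/4.
rho, phi = np.hypot(1.0, 1.0), np.arctan2(1.0, 1.0)
assert np.isclose(rho, np.sqrt(2)) and np.isclose(phi, np.pi / 4)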