diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1e97d87..20ed71e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,12 +4,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/).
 
-## Unreleased
+## 0.4.1 (2025-04-08)
 
 Fixed:
 - Attributes are now properly preserved when updating coordinates during pre-formatting for regridding ([#54](https://github.com/xarray-contrib/xarray-regrid/pull/54)).
 - Handle datasets with inconsistent chunksizes during pre-formatting ([#57](https://github.com/xarray-contrib/xarray-regrid/pull/57)).
-
+- `regrid.stat` now properly regrids input data even when coordinates are not monotonic ([#58](https://github.com/xarray-contrib/xarray-regrid/pull/58)).
 
 ## 0.4.0 (2024-09-26)
 
diff --git a/src/xarray_regrid/__init__.py b/src/xarray_regrid/__init__.py
index a4c10e8..7b565fc 100644
--- a/src/xarray_regrid/__init__.py
+++ b/src/xarray_regrid/__init__.py
@@ -9,4 +9,4 @@
     "methods",
 ]
 
-__version__ = "0.4.0"
+__version__ = "0.4.1"
diff --git a/src/xarray_regrid/methods/flox_reduce.py b/src/xarray_regrid/methods/flox_reduce.py
index a254e40..6045306 100644
--- a/src/xarray_regrid/methods/flox_reduce.py
+++ b/src/xarray_regrid/methods/flox_reduce.py
@@ -72,15 +72,19 @@ def statistic_reduce(
         msg = f"Invalid method. Please choose from '{valid_methods}'."
         raise ValueError(msg)
 
-    coords = utils.common_coords(data, target_ds, remove_coord=time_dim)
-    target_coords = xr.Dataset(target_ds.coords)  # coords target coords for reindexing
-    sorted_target_coords = target_coords.sortby(coords)
+    # Make sure the regridding coordinates are sorted
+    coord_names = utils.common_coords(data, target_ds, remove_coord=time_dim)
+    sorted_target_coords = xr.Dataset(coords=target_ds.coords)
+    for coord_name in coord_names:
+        sorted_target_coords = utils.ensure_monotonic(sorted_target_coords, coord_name)
+        data = utils.ensure_monotonic(data, coord_name)
+    coords = {name: sorted_target_coords[name] for name in coord_names}
 
     bounds = tuple(
         construct_intervals(sorted_target_coords[coord].to_numpy()) for coord in coords
     )
 
-    data = reduce_data_to_new_domain(data, sorted_target_coords, coords)
+    data = reduce_data_to_new_domain(data, sorted_target_coords, coord_names)
 
     result: xr.Dataset = flox.xarray.xarray_reduce(
         data,
@@ -91,8 +95,8 @@ def statistic_reduce(
         fill_value=fill_value,
     )
 
-    result = restore_properties(result, data, target_ds, coords, fill_value)
-    result = result.reindex_like(target_coords, copy=False)
+    result = restore_properties(result, data, target_ds, coord_names, fill_value)
+    result = result.reindex_like(sorted_target_coords, copy=False)
 
     return result
diff --git a/tests/test_reduce.py b/tests/test_reduce.py
index a14fd5c..fbc7a21 100644
--- a/tests/test_reduce.py
+++ b/tests/test_reduce.py
@@ -233,3 +233,28 @@ def test_var(dummy_lc_data, dummy_target_grid):
         dummy_lc_data["lc"].astype(float).regrid.stat(dummy_target_grid, "var"),
         make_expected_ds(expected_data)["lc"],
     )
+
+
+def test_unsorted_coords(dummy_lc_data, dummy_target_grid):
+    """Should pass if the input data has coords that are not ordered."""
+    expected_data = np.array(
+        [
+            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
+            [1.0, 0.75, 0.75, 0.0, 0.0, 0.0],
+            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+            [2.25, 0.0, 0.0, 0.0, 0.0, 0.25],
+            [0.0, 1.6875, 2.25, 0.0, 0.25, 0.0],
+        ]
+    )
+    lc_data = dummy_lc_data.copy()
+
lc_data["latitude"] * 0 + np.array( + [1, 3, 7, 0, 2, 8, 9, -1, 5, 11, 12] + ) + lc_data = lc_data.sortby("scramble_order") + + xr.testing.assert_equal( + lc_data["lc"].astype(float).regrid.stat(dummy_target_grid, "var"), + make_expected_ds(expected_data)["lc"], + )