Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/data/src/pyearthtools/data/transforms/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,5 +94,5 @@ def apply(self, dataset):
"get_default_transform",
"derive",
"Derive",
"projections",
"projection",
]
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def _get_method(cls, method: Callable | str | dict[str, Callable | str]):
AttributeError: If method is invalid
"""
if (
method == None
method is None
or isinstance(method, (dict, Callable))
or method in known_methods
or hasattr(aggregation, method)
Expand Down
4 changes: 2 additions & 2 deletions packages/data/src/pyearthtools/data/transforms/coordinates.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ def apply(self, dataset):
def _standardise(dataset):
if not any(dataset[self._longitude_name] < 0):
return dataset
func = lambda x: x % 360
func = lambda x: x % 360 # noqa
dataset = dataset.assign_coords({self._longitude_name: func(dataset[self._longitude_name])})
return dataset.sortby(self._longitude_name)

Expand All @@ -128,7 +128,7 @@ def _standardise(dataset):
def _standardise(dataset):
if not any(dataset[self._longitude_name] > 180):
return dataset
func = lambda x: ((x + 180) % 360) - 180
func = lambda x: ((x + 180) % 360) - 180 # noqa
# (180 - abs(x - 180)) * np.sign((x - 180)) * -1
dataset = dataset.assign_coords({self._longitude_name: func(dataset[self._longitude_name])})
return dataset.sortby(self._longitude_name)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ def apply(self, dataset: xr.Dataset) -> xr.Dataset:
def encode(x):
return x

tr_revert_reindex = pyearthtools.data.transforms.coordinates.ReIndex(dataset.coords) # type: ignore
tf_revert_reindex = pyearthtools.data.transforms.coordinates.ReIndex(dataset.coords) # type: ignore
tf_reindex = pyearthtools.data.transforms.coordinates.ReIndex(
{key: "sorted" for key in dataset.coords if len(np.atleast_1d(dataset.coords[key].values)) > 1}
) # type: ignore
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,11 @@

"""

from pyearthtools.data.transforms.normalisation import _utils
# from pyearthtools.data.transforms.normalisation import _utils
from pyearthtools.data.transforms.normalisation.normalise import Normalise
from pyearthtools.data.transforms.normalisation.denormalise import Denormalise

__all__ = [
"Normalise",
"Denormalise",
]
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,11 @@
import numpy as np
import xarray as xr

xr.set_options(keep_attrs=True)

from pyearthtools.data.transforms.normalisation.default import Normaliser
from pyearthtools.data.transforms.transform import FunctionTransform, Transform

xr.set_options(keep_attrs=True)


class Normalise(Normaliser):
"""
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from dataclasses import dataclass, field, KW_ONLY
from dataclasses import dataclass  # , field, KW_ONLY
import xarray as xr

from . import _projection_manager as projmanager
Expand Down Expand Up @@ -89,3 +89,6 @@ def __call__(self, ds: xr.Dataset):
# define any custom post processing below
# >>>
return ds_interp


__all__ = ["Rainfields3ProjAus", "HimawariProjAus"]
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ def get_crs_or_default(self, ds: xr.Dataset = None) -> pyproj.CRS:
warnings.warn(warn_proj_defaulted)
return default_proj

raise NotImplementedError(f"Attribute extraction from {proj_kind} is unsupported")
raise NotImplementedError("Attribute extraction from this proj kind is unsupported")


class CoordUnits(IntEnum):
Expand Down Expand Up @@ -551,7 +551,7 @@ def _grid_points_to_dataarray_coords(
# to account for pesky floating point issues - not perfect but good enough
# if anything, keeping this weak means interpolation can still trigger instead
# of overzealously raising errors.
approx_unique = lambda _v: np.unique(np.round(_v * 1e6) // 1e6)
approx_unique = lambda _v: np.unique(np.round(_v * 1e6) // 1e6) # noqa

# check that the conversion is indeed unique.
if (approx_unique(lon_1d) == approx_unique(lon_grid)).all() or (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ def __getitem__(self, index) -> Transform | TransformCollection:
for trans in self._transforms:
if isinstance(index, str) and trans.__class__.__name__ == index:
return trans
elif type(trans) == index:
elif type(trans) is index:
return trans
elif isinstance(index, int):
return self._transforms[index]
Expand Down
2 changes: 2 additions & 0 deletions packages/data/tests/data/operations/test_index_routines.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ def make_dataset(list_of_paths, engine=None, chunks=None, combine_attrs=None, pa
monkeypatch.setattr(xr, "open_mfdataset", make_dataset)

result = index_routines._mf_series(data_function, start_time, end_time, interval)
assert result is not None


def test_safe_series(monkeypatch):
Expand Down Expand Up @@ -59,3 +60,4 @@ def dummy_series(datafn, start_time, end_time, interval, skip_invalid):
monkeypatch.setattr(index_routines, "series", dummy_series)

result = index_routines.safe_series(data_function, start_time, end_time, interval)
assert result is not None
3 changes: 2 additions & 1 deletion packages/data/tests/indexes/test_fake.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,5 @@ def test_FakeIndex():

fi = FakeIndex(["temperature", "humidity"])
result = fi.get("2020-01-01")
fi._desc_
assert result is not None
assert fi._desc_ is not None
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,5 @@ def test_repr_mixin():

subject = MixableTestClass()

r = repr(subject)
h = subject._repr_html_() # Used by Jupyter
_r = repr(subject)
_h = subject._repr_html_() # Used by Jupyter
5 changes: 4 additions & 1 deletion packages/data/tests/operations/test_binning.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,17 @@ def test_binning():

# Smoke tests
binned = binning(da, "daily")
assert binned is not None
binned = binning(da, "daily", expand=False)
assert binned is not None

offset = TimeDelta(1, "days")
binned = binning(da, "daily", offset=offset)
assert binned is not None

# Test exceptions
with pytest.raises(ValueError):
binned = binning(da, "wobbly")
_binned = binning(da, "wobbly")

with pytest.raises(AttributeError):
_binned = binning(da, "daily", dimension="strange")