From d024c536a12e4419fe7608a3c6f84f17c5ee587b Mon Sep 17 00:00:00 2001 From: Mike Taves Date: Mon, 25 Aug 2025 15:16:15 +1200 Subject: [PATCH] Allow path-like objects for vector source; use Path for tests --- pyproject.toml | 10 ++- pytest.ini | 6 -- src/rasterstats/io.py | 4 +- src/rasterstats/main.py | 6 +- tests/test_cli.py | 37 +++++------ tests/test_io.py | 19 +++--- tests/test_utils.py | 14 ++--- tests/test_zonal.py | 132 +++++++++++++++++++--------------------- 8 files changed, 111 insertions(+), 117 deletions(-) delete mode 100644 pytest.ini diff --git a/pyproject.toml b/pyproject.toml index 595ef97..e29530d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ classifiers = [ requires-python = ">=3.7" dependencies = [ "affine", - "click >7.1", + "click >7.1, !=8.2.1", "cligj >=0.4", "fiona", "numpy >=1.9", @@ -66,6 +66,14 @@ pointquery = "rasterstats.cli:pointquery" Documentation = "https://pythonhosted.org/rasterstats/" "Source Code" = "https://github.com/perrygeo/python-rasterstats" +[tool.pytest.ini_options] +filterwarnings = [ + "error", + "ignore::UserWarning", +] +testpaths = ["tests"] +# addopts = "--verbose -rf --ipdb --maxfail=1" + [tool.setuptools.dynamic] version = {attr = "rasterstats._version.__version__"} diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 3cccd0e..0000000 --- a/pytest.ini +++ /dev/null @@ -1,6 +0,0 @@ -[pytest] -filterwarnings = - error - ignore::UserWarning -norecursedirs = examples* src* scripts* docs* -# addopts = --verbose -rf --ipdb --maxfail=1 diff --git a/src/rasterstats/io.py b/src/rasterstats/io.py index 3f9123b..72ca31f 100644 --- a/src/rasterstats/io.py +++ b/src/rasterstats/io.py @@ -3,6 +3,7 @@ import warnings from collections.abc import Iterable, Mapping from json import JSONDecodeError +from os import PathLike import fiona import numpy as np @@ -90,7 +91,8 @@ def parse_feature(obj): def read_features(obj, layer=0): features_iter = None - if isinstance(obj, str): + if isinstance(obj, (str, PathLike)): + obj = str(obj) try: # test it as fiona data source with fiona.open(obj, "r", layer=layer) as src: diff --git a/src/rasterstats/main.py b/src/rasterstats/main.py index a263e06..f39dc1a 100644 --- a/src/rasterstats/main.py +++ b/src/rasterstats/main.py @@ -75,9 +75,11 @@ def gen_zonal_stats( Parameters ---------- - vectors: path to an vector source or geo-like python objects + vectors : str or PathLike + Path to an vector source or geo-like python objects. - raster: ndarray or path to a GDAL raster source + raster: array_like, str or PathLike + NumPy array or path to a GDAL raster source. If ndarray is passed, the ``affine`` kwarg is required. 
layer: int or string, optional diff --git a/tests/test_cli.py b/tests/test_cli.py index a500fb3..ccd7314 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,6 +1,6 @@ import json -import os.path import warnings +from pathlib import Path from click.testing import CliRunner @@ -9,10 +9,11 @@ # Some warnings must be ignored to parse output properly # https://github.com/pallets/click/issues/371#issuecomment-223790894 +data_dir = Path(__file__).parent / "data" def test_cli_feature(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/feature.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "feature.geojson") runner = CliRunner() warnings.simplefilter("ignore") result = runner.invoke( @@ -28,15 +29,15 @@ def test_cli_feature(): def test_cli_feature_stdin(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/feature.geojson") + raster = str(data_dir / "slope.tif") + vector_pth = data_dir / "feature.geojson" runner = CliRunner() warnings.simplefilter("ignore") result = runner.invoke( zonalstats, ["--raster", raster, "--stats", "all", "--prefix", "test_"], - input=open(vector).read(), + input=vector_pth.read_text(), ) assert result.exit_code == 0 outdata = json.loads(result.output) @@ -47,8 +48,8 @@ def test_cli_feature_stdin(): def test_cli_features_sequence(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( zonalstats, @@ -71,8 +72,8 @@ def test_cli_features_sequence(): def test_cli_features_sequence_rs(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( zonalstats, @@ -94,8 +95,8 @@ def test_cli_features_sequence_rs(): def test_cli_featurecollection(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( zonalstats, [vector, "--raster", raster, "--stats", "mean", "--prefix", "test_"] @@ -110,8 +111,8 @@ def test_cli_featurecollection(): def test_cli_pointquery(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( pointquery, [vector, "--raster", raster, "--property-name", "slope"] @@ -124,8 +125,8 @@ def test_cli_pointquery(): def test_cli_point_sequence(): - raster = os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( pointquery, @@ -139,8 +140,8 @@ def test_cli_point_sequence(): def test_cli_point_sequence_rs(): - raster = 
os.path.join(os.path.dirname(__file__), "data/slope.tif") - vector = os.path.join(os.path.dirname(__file__), "data/featurecollection.geojson") + raster = str(data_dir / "slope.tif") + vector = str(data_dir / "featurecollection.geojson") runner = CliRunner() result = runner.invoke( pointquery, diff --git a/tests/test_io.py b/tests/test_io.py index d70c3e2..824a503 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -1,8 +1,8 @@ import json -import os -import sys +from pathlib import Path import fiona +import numpy as np import pytest import rasterio from shapely.geometry import shape @@ -18,12 +18,9 @@ window_bounds, ) -sys.path.append(os.path.dirname(os.path.abspath(__file__))) -DATA = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data") -polygons = os.path.join(DATA, "polygons.shp") -raster = os.path.join(DATA, "slope.tif") - -import numpy as np +data_dir = Path(__file__).parent / "data" +polygons = data_dir / "polygons.shp" +raster = data_dir / "slope.tif" arr = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]]) @@ -59,12 +56,12 @@ def test_fiona_path(): def test_layer_index(): - layer = fiona.listlayers(DATA).index("polygons") - assert list(read_features(DATA, layer=layer)) == target_features + layer = fiona.listlayers(data_dir).index("polygons") + assert list(read_features(data_dir, layer=layer)) == target_features def test_layer_name(): - assert list(read_features(DATA, layer="polygons")) == target_features + assert list(read_features(data_dir, layer="polygons")) == target_features def test_path_unicode(): diff --git a/tests/test_utils.py b/tests/test_utils.py index 3677cea..0f88d35 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,4 @@ -import os -import sys +from pathlib import Path import pytest from shapely.geometry import LineString @@ -13,21 +12,20 @@ stats_to_csv, ) -sys.path.append(os.path.dirname(os.path.abspath(__file__))) -DATA = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data") -raster = os.path.join(DATA, "slope.tif") +data_dir = Path(__file__).parent / "data" +raster = data_dir / "slope.tif" def test_csv(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="*") csv = stats_to_csv(stats) assert csv.split()[0] == ",".join(sorted(VALID_STATS)) def test_categorical_csv(): - polygons = os.path.join(DATA, "polygons.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + polygons = data_dir / "polygons.shp" + categorical_raster = data_dir / "slope_classes.tif" stats = zonal_stats(polygons, categorical_raster, categorical=True) csv = stats_to_csv(stats) assert csv.split()[0] == "1.0,2.0,5.0" diff --git a/tests/test_zonal.py b/tests/test_zonal.py index 9b0c514..888d2ba 100644 --- a/tests/test_zonal.py +++ b/tests/test_zonal.py @@ -1,7 +1,6 @@ # test zonal stats import json -import os -import sys +from pathlib import Path import numpy as np import pytest @@ -14,14 +13,12 @@ from rasterstats.io import read_featurecollection, read_features from rasterstats.utils import VALID_STATS -sys.path.append(os.path.dirname(os.path.abspath(__file__))) - -DATA = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data") -raster = os.path.join(DATA, "slope.tif") +data_dir = Path(__file__).parent / "data" +raster = data_dir / "slope.tif" def test_main(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster) for key in ["count", "min", "max", "mean"]: assert key in stats[0] @@ -33,7 
+30,7 @@ def test_main(): # remove after band_num alias is removed def test_band_alias(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats_a = zonal_stats(polygons, raster) stats_b = zonal_stats(polygons, raster, band=1) with pytest.deprecated_call(): @@ -42,14 +39,14 @@ def test_band_alias(): def test_zonal_global_extent(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster) global_stats = zonal_stats(polygons, raster, global_src_extent=True) assert stats == global_stats def test_zonal_nodata(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, nodata=0) assert len(stats) == 2 assert stats[0]["count"] == 75 @@ -57,29 +54,24 @@ def test_zonal_nodata(): def test_doesnt_exist(): - nonexistent = os.path.join(DATA, "DOESNOTEXIST.shp") + nonexistent = data_dir / "DOESNOTEXIST.shp" with pytest.raises(ValueError): zonal_stats(nonexistent, raster) def test_nonsense(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" with pytest.raises(ValueError): zonal_stats("blaghrlargh", raster) with pytest.raises(OSError): zonal_stats(polygons, "blercherlerch") with pytest.raises(ValueError): - zonal_stats( - [ - "blaghrlargh", - ], - raster, - ) + zonal_stats(["blaghrlargh"], raster) # Different geometry types def test_points(): - points = os.path.join(DATA, "points.shp") + points = data_dir / "points.shp" stats = zonal_stats(points, raster) # three features assert len(stats) == 3 @@ -90,8 +82,8 @@ def test_points(): def test_points_categorical(): - points = os.path.join(DATA, "points.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + points = data_dir / "points.shp" + categorical_raster = data_dir / "slope_classes.tif" stats = zonal_stats(points, categorical_raster, categorical=True) # three features assert len(stats) == 3 @@ -101,7 +93,7 @@ def test_points_categorical(): def test_lines(): - lines = os.path.join(DATA, "lines.shp") + lines = data_dir / "lines.shp" stats = zonal_stats(lines, raster) assert len(stats) == 2 assert stats[0]["count"] == 58 @@ -110,14 +102,14 @@ def test_lines(): # Test multigeoms def test_multipolygons(): - multipolygons = os.path.join(DATA, "multipolygons.shp") + multipolygons = data_dir / "multipolygons.shp" stats = zonal_stats(multipolygons, raster) assert len(stats) == 1 assert stats[0]["count"] == 125 def test_multilines(): - multilines = os.path.join(DATA, "multilines.shp") + multilines = data_dir / "multilines.shp" stats = zonal_stats(multilines, raster) assert len(stats) == 1 # can differ slightly based on platform/gdal version @@ -125,15 +117,15 @@ def test_multilines(): def test_multipoints(): - multipoints = os.path.join(DATA, "multipoints.shp") + multipoints = data_dir / "multipoints.shp" stats = zonal_stats(multipoints, raster) assert len(stats) == 1 assert stats[0]["count"] == 3 def test_categorical(): - polygons = os.path.join(DATA, "polygons.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + polygons = data_dir / "polygons.shp" + categorical_raster = data_dir / "slope_classes.tif" stats = zonal_stats(polygons, categorical_raster, categorical=True) assert len(stats) == 2 assert stats[0][1.0] == 75 @@ -141,8 +133,8 @@ def test_categorical(): def test_categorical_map(): - polygons = os.path.join(DATA, "polygons.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + polygons = data_dir / 
"polygons.shp" + categorical_raster = data_dir / "slope_classes.tif" catmap = {5.0: "cat5"} stats = zonal_stats( polygons, categorical_raster, categorical=True, category_map=catmap @@ -154,14 +146,14 @@ def test_categorical_map(): def test_specify_stats_list(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats=["min", "max"]) assert sorted(stats[0].keys()) == sorted(["min", "max"]) assert "count" not in list(stats[0].keys()) def test_specify_all_stats(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="ALL") assert sorted(stats[0].keys()) == sorted(VALID_STATS) stats = zonal_stats(polygons, raster, stats="*") @@ -169,26 +161,26 @@ def test_specify_all_stats(): def test_specify_stats_string(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="min max") assert sorted(stats[0].keys()) == sorted(["min", "max"]) assert "count" not in list(stats[0].keys()) def test_specify_stats_invalid(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" with pytest.raises(ValueError): zonal_stats(polygons, raster, stats="foo max") def test_optional_stats(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="min max sum majority median std") assert stats[0]["min"] <= stats[0]["median"] <= stats[0]["max"] def test_range(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="range min max") for stat in stats: assert stat["range"] == stat["max"] - stat["min"] @@ -200,8 +192,8 @@ def test_range(): def test_nodata(): - polygons = os.path.join(DATA, "polygons.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + polygons = data_dir / "polygons.shp" + categorical_raster = data_dir / "slope_classes.tif" stats = zonal_stats( polygons, categorical_raster, stats="*", categorical=True, nodata=1.0 ) @@ -213,15 +205,15 @@ def test_nodata(): def test_dataset_mask(): - polygons = os.path.join(DATA, "polygons.shp") - raster = os.path.join(DATA, "dataset_mask.tif") + polygons = data_dir / "polygons.shp" + raster = data_dir / "dataset_mask.tif" stats = zonal_stats(polygons, raster, stats="*") assert stats[0]["count"] == 75 assert stats[1]["count"] == 0 def test_partial_overlap(): - polygons = os.path.join(DATA, "polygons_partial_overlap.shp") + polygons = data_dir / "polygons_partial_overlap.shp" stats = zonal_stats(polygons, raster, stats="count") for res in stats: # each polygon should have at least a few pixels overlap @@ -229,7 +221,7 @@ def test_partial_overlap(): def test_no_overlap(): - polygons = os.path.join(DATA, "polygons_no_overlap.shp") + polygons = data_dir / "polygons_no_overlap.shp" stats = zonal_stats(polygons, raster, stats="count") for res in stats: # no polygon should have any overlap @@ -237,7 +229,7 @@ def test_no_overlap(): def test_all_touched(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, all_touched=True) assert stats[0]["count"] == 95 # 75 if ALL_TOUCHED=False assert stats[1]["count"] == 73 # 50 if ALL_TOUCHED=False @@ -245,7 +237,7 @@ def test_all_touched(): def test_ndarray_without_affine(): with rasterio.open(raster) as src: - polygons = os.path.join(DATA, 
"polygons.shp") + polygons = data_dir / "polygons.shp" with pytest.raises(ValueError): zonal_stats(polygons, src.read(1)) # needs affine kwarg @@ -268,7 +260,7 @@ def test_ndarray(): arr = src.read(1) affine = src.transform - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, arr, affine=affine) stats2 = zonal_stats(polygons, raster) for s1, s2 in zip(stats, stats2): @@ -278,7 +270,7 @@ def test_ndarray(): assert stats[0]["count"] == 75 assert stats[1]["count"] == 50 - points = os.path.join(DATA, "points.shp") + points = data_dir / "points.shp" stats = zonal_stats(points, arr, affine=affine) assert stats == zonal_stats(points, raster) assert sum([x["count"] for x in stats]) == 3 @@ -287,7 +279,7 @@ def test_ndarray(): def test_alias(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster) with pytest.deprecated_call(): stats2 = raster_stats(polygons, raster) @@ -295,7 +287,7 @@ def test_alias(): def test_add_stats(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" def mymean(x): return np.ma.mean(x) @@ -306,7 +298,7 @@ def mymean(x): def test_add_stats_prop(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" def mymean_prop(x, prop): return np.ma.mean(x) * prop["id"] @@ -316,7 +308,7 @@ def mymean_prop(x, prop): assert stats[i]["mymean_prop"] == stats[i]["mean"] * (i + 1) def test_add_stats_prop_and_array(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" def mymean_prop_and_array(x, prop, rv_array): # confirm that the object exists and is accessible. @@ -329,7 +321,7 @@ def mymean_prop_and_array(x, prop, rv_array): def test_mini_raster(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, raster_out=True) stats2 = zonal_stats( polygons, @@ -343,7 +335,7 @@ def test_mini_raster(): def test_percentile_good(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, stats="median percentile_50 percentile_90") assert "percentile_50" in stats[0].keys() assert "percentile_90" in stats[0].keys() @@ -355,7 +347,7 @@ def test_zone_func_has_return(): def example_zone_func(zone_arr): return np.ma.masked_array(np.full(zone_arr.shape, 1)) - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, zone_func=example_zone_func) assert stats[0]["max"] == 1 assert stats[0]["min"] == 1 @@ -366,7 +358,7 @@ def test_zone_func_good(): def example_zone_func(zone_arr): zone_arr[:] = 0 - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, zone_func=example_zone_func) assert stats[0]["max"] == 0 assert stats[0]["min"] == 0 @@ -375,14 +367,14 @@ def example_zone_func(zone_arr): def test_zone_func_bad(): not_a_func = "jar jar binks" - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" with pytest.raises(TypeError): zonal_stats(polygons, raster, zone_func=not_a_func) def test_percentile_nodata(): - polygons = os.path.join(DATA, "polygons.shp") - categorical_raster = os.path.join(DATA, "slope_classes.tif") + polygons = data_dir / "polygons.shp" + categorical_raster = data_dir / "slope_classes.tif" # By setting nodata to 1, one of our polygons is within the raster 
extent # but has an empty masked array stats = zonal_stats(polygons, categorical_raster, stats=["percentile_90"], nodata=1) @@ -391,13 +383,13 @@ def test_percentile_nodata(): def test_percentile_bad(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" with pytest.raises(ValueError): zonal_stats(polygons, raster, stats="percentile_101") def test_json_serializable(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats( polygons, raster, stats=VALID_STATS + ["percentile_90"], categorical=True ) @@ -409,7 +401,7 @@ def test_json_serializable(): def test_direct_features_collections(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" features = read_features(polygons) collection = read_featurecollection(polygons) @@ -421,8 +413,8 @@ def test_direct_features_collections(): def test_all_nodata(): - polygons = os.path.join(DATA, "polygons.shp") - raster = os.path.join(DATA, "all_nodata.tif") + polygons = data_dir / "polygons.shp" + raster = data_dir / "all_nodata.tif" stats = zonal_stats(polygons, raster, stats=["nodata", "count"]) assert stats[0]["nodata"] == 75 assert stats[0]["count"] == 0 @@ -431,8 +423,8 @@ def test_all_nodata(): def test_some_nodata(): - polygons = os.path.join(DATA, "polygons.shp") - raster = os.path.join(DATA, "slope_nodata.tif") + polygons = data_dir / "polygons.shp" + raster = data_dir / "slope_nodata.tif" stats = zonal_stats(polygons, raster, stats=["nodata", "count"]) assert stats[0]["nodata"] == 36 assert stats[0]["count"] == 39 @@ -458,8 +450,8 @@ def test_nan_nodata(): def test_some_nodata_ndarray(): - polygons = os.path.join(DATA, "polygons.shp") - raster = os.path.join(DATA, "slope_nodata.tif") + polygons = data_dir / "polygons.shp" + raster = data_dir / "slope_nodata.tif" with rasterio.open(raster) as src: arr = src.read(1) affine = src.transform @@ -483,7 +475,7 @@ def test_transform(): with rasterio.open(raster) as src: arr = src.read(1) affine = src.transform - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, arr, affine=affine) with pytest.deprecated_call(): @@ -492,7 +484,7 @@ def test_transform(): def test_prefix(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" stats = zonal_stats(polygons, raster, prefix="TEST") for key in ["count", "min", "max", "mean"]: assert key not in stats[0] @@ -501,7 +493,7 @@ def test_prefix(): def test_geojson_out(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" features = zonal_stats(polygons, raster, geojson_out=True) for feature in features: assert feature["type"] == "Feature" @@ -527,7 +519,7 @@ def test_geojson_out_with_no_properties(): # remove when copy_properties alias is removed def test_copy_properties_warn(): - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" # run once to trigger any other unrelated deprecation warnings # so the test does not catch them instead stats_a = zonal_stats(polygons, raster) @@ -567,7 +559,7 @@ def test_nan_counts(): def test_geodataframe_zonal(): gpd = pytest.importorskip("geopandas") - polygons = os.path.join(DATA, "polygons.shp") + polygons = data_dir / "polygons.shp" df = gpd.read_file(polygons) if not hasattr(df, "__geo_interface__"): pytest.skip("This version of geopandas doesn't support df.__geo_interface__")
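
A minimal usage sketch of what this patch enables (illustration only, not part of the diff): read_features() now accepts os.PathLike and casts it to str before handing it to fiona.open(), so pathlib.Path objects can be passed directly as the vector source, just as the converted tests above do. The tests/data paths below assume the repository layout used by the test suite.

    from pathlib import Path

    from rasterstats import zonal_stats

    data_dir = Path("tests/data")

    # A pathlib.Path (or any os.PathLike) is now accepted for the vector
    # source; io.read_features() converts it to str before fiona.open().
    # rasterio already accepts Path objects for the raster argument, which
    # is why the tests can pass data_dir / "slope.tif" unchanged.
    stats = zonal_stats(data_dir / "polygons.shp", data_dir / "slope.tif")
    print(stats[0]["count"], stats[0]["mean"])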