4 changes: 3 additions & 1 deletion docs/conf.py
@@ -13,13 +13,14 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import datetime

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import datetime

sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../'))
@@ -45,6 +46,7 @@
]

import sphinx_autodoc_typehints

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

10 changes: 5 additions & 5 deletions docs/cookbook/sampling.md
@@ -6,10 +6,10 @@ but rather want to extract a result at specific locations.
Examples include extracting training data for model calibration, or computing the result for
areas where validation data is available.

An important constraint is that most implementations assume that sampling is an operation
on relatively small areas, for instance up to 512x512 pixels (but often much smaller).
When extracting polygons with larger areas, it is recommended to look into running a separate job per 'sample'.
Some more important performance notices are mentioned later in the chapter; please read them carefully
to get the best results.

Sampling can be done for points or polygons:
@@ -23,9 +23,9 @@ public url, and to load it in openEO using {py:meth}`openeo.rest.connection.Conn

## Sampling at point locations

To sample point locations, the `openeo.rest.datacube.DataCube.aggregate_spatial` method can be used. The reducer can be a
commonly supported reducer like `min`, `max` or `mean` and will receive only one value as input in most cases. Note that
in edge cases, a point can intersect with up to 4 pixels. If this is not desirable, it might be worth trying to align
points with pixel centers, which does require more advanced knowledge of the pixel grid of your data cube.

More information on `aggregate_spatial` is available [here](_aggregate-spatial-evi).
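
As a minimal sketch of such a point extraction (the backend URL, collection id, band names and output format here are illustrative assumptions, not prescribed by this chapter):

```python
import openeo

# Hypothetical backend and collection, for illustration only.
connection = openeo.connect("https://openeo.example.org").authenticate_oidc()

cube = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=["2023-06-01", "2023-06-30"],
    bands=["B04", "B08"],
)

# Point locations as a GeoJSON FeatureCollection: each point becomes one sample.
points = {
    "type": "FeatureCollection",
    "features": [
        {"type": "Feature", "properties": {},
         "geometry": {"type": "Point", "coordinates": [5.05, 51.21]}},
        {"type": "Feature", "properties": {},
         "geometry": {"type": "Point", "coordinates": [5.10, 51.25]}},
    ],
}

# "mean" typically reduces a single pixel value per point (up to 4 in edge cases).
samples = cube.aggregate_spatial(geometries=points, reducer="mean")

# Output format support is backend-specific; CSV is a common choice for samples.
samples.execute_batch("samples.csv")
```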
8 changes: 5 additions & 3 deletions examples/archive/udf/udf_modify_spatial.py
@@ -1,8 +1,10 @@
import numpy as np
import xarray

from openeo.metadata import CollectionMetadata
from openeo.udf import XarrayDataCube
from openeo.udf.debug import inspect
from openeo.metadata import CollectionMetadata
import numpy as np


def apply_metadata(input_metadata:CollectionMetadata, context:dict) -> CollectionMetadata:

@@ -41,4 +43,4 @@ def apply_datacube(cube: XarrayDataCube, context: dict) -> XarrayDataCube:
predicted_cube = xarray.DataArray(predicted_array, dims=['bands', 'x', 'y'], coords=dict(x=coord_x, y=coord_y))


return XarrayDataCube(predicted_cube)
51 changes: 34 additions & 17 deletions openeo/processes.py
@@ -2879,7 +2879,8 @@ def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context
aggregated values (i.e. no-data). The spatial dimensions are replaced by a dimension of type 'geometries'
and if `target_dimension` is not `null`, a new dimension is added.
"""
return _process('aggregate_spatial',
return _process(
"aggregate_spatial",
data=data,
geometries=geometries,
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -2915,7 +2916,8 @@ def aggregate_spatial_window(data, reducer, size, boundary=UNSET, align=UNSET, c
labels will be set to the coordinate at the center of the window. The other dimension properties (name,
type and reference system) remain unchanged.
"""
return _process('aggregate_spatial_window',
return _process(
"aggregate_spatial_window",
data=data,
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
size=size,
@@ -2954,7 +2956,8 @@ def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET,
system and resolution) remain unchanged, except for the resolution and dimension labels of the given
temporal dimension.
"""
return _process('aggregate_temporal',
return _process(
"aggregate_temporal",
data=data,
intervals=intervals,
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -3008,7 +3011,8 @@ def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UN
the source data cube has just one dimension label `2020-01-05`, the process returns a data cube with just a
single dimension label (`2020-005`).
"""
return _process('aggregate_temporal_period',
return _process(
"aggregate_temporal_period",
data=data,
period=period,
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -3149,7 +3153,8 @@ def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UN
incrementing integers starting from zero, - the resolution changes, and - the reference system is
undefined.
"""
return _process('apply_dimension',
return _process(
"apply_dimension",
data=data,
process=build_child_callback(process, parent_parameters=['data', 'context']),
dimension=dimension,
@@ -3207,7 +3212,8 @@ def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET) -> Pro
:return: A raster data cube with the newly computed values and the same dimensions. The dimension
properties (name, type, labels, reference system and resolution) remain unchanged.
"""
return _process('apply_neighborhood',
return _process(
"apply_neighborhood",
data=data,
process=build_child_callback(process, parent_parameters=['data', 'context']),
size=size,
@@ -3236,7 +3242,8 @@ def apply_polygon(data, polygons, process, mask_value=UNSET, context=UNSET) -> P
:return: A data cube with the newly computed values and the same dimensions. The dimension properties
(name, type, labels, reference system and resolution) remain unchanged.
"""
return _process('apply_polygon',
return _process(
"apply_polygon",
data=data,
polygons=polygons,
process=build_child_callback(process, parent_parameters=['data', 'context']),
@@ -3330,7 +3337,8 @@ def ard_normalized_radar_backscatter(data, elevation_model=UNSET, contributing_a
DEM-based local incidence angles in degrees. The data returned is CARD4L compliant with corresponding
metadata.
"""
return _process('ard_normalized_radar_backscatter',
return _process(
"ard_normalized_radar_backscatter",
data=data,
elevation_model=elevation_model,
contributing_area=contributing_area,
@@ -3385,7 +3393,8 @@ def ard_surface_reflectance(data, atmospheric_correction_method, cloud_detection
(optional): Contains coefficients used for terrain illumination correction are provided for each pixel.
The data returned is CARD4L compliant with corresponding metadata.
"""
return _process('ard_surface_reflectance',
return _process(
"ard_surface_reflectance",
data=data,
atmospheric_correction_method=atmospheric_correction_method,
cloud_detection_method=cloud_detection_method,
@@ -3425,7 +3434,8 @@ def array_apply(data, process, context=UNSET) -> ProcessBuilder:
:return: An array with the newly computed values. The number of elements are the same as for the original
array.
"""
return _process('array_apply',
return _process(
"array_apply",
data=data,
process=build_child_callback(process, parent_parameters=['x', 'index', 'label', 'context']),
context=context
@@ -3515,7 +3525,8 @@ def array_filter(data, condition, context=UNSET) -> ProcessBuilder:
:return: An array filtered by the specified condition. The number of elements are less than or equal
compared to the original array.
"""
return _process('array_filter',
return _process(
"array_filter",
data=data,
condition=build_child_callback(condition, parent_parameters=['x', 'index', 'label', 'context']),
context=context
@@ -4068,7 +4079,8 @@ def filter_labels(data, condition, dimension, context=UNSET) -> ProcessBuilder:
system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension
labels.
"""
return _process('filter_labels',
return _process(
"filter_labels",
data=data,
condition=build_child_callback(condition, parent_parameters=['value', 'context']),
dimension=dimension,
@@ -4168,7 +4180,8 @@ def fit_curve(data, parameters, function, ignore_nodata=UNSET) -> ProcessBuilder

:return: An array with the optimal values for the parameters.
"""
return _process('fit_curve',
return _process(
"fit_curve",
data=data,
parameters=parameters,
function=build_child_callback(function, parent_parameters=['x', 'parameters']),
@@ -4716,7 +4729,8 @@ def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET) -> ProcessB
:return: The merged data cube. See the process description for details regarding the dimensions and
dimension properties (name, type, labels, reference system and resolution).
"""
return _process('merge_cubes',
return _process(
"merge_cubes",
cube1=cube1,
cube2=cube2,
overlap_resolver=(build_child_callback(overlap_resolver, parent_parameters=['x', 'y', 'context']) if overlap_resolver not in [None, UNSET] else overlap_resolver),
@@ -4915,7 +4929,8 @@ def predict_curve(parameters, function, dimension, labels=UNSET) -> ProcessBuild
:return: A data cube with the predicted values with the provided dimension `dimension` having as many
labels as provided through `labels`.
"""
return _process('predict_curve',
return _process(
"predict_curve",
parameters=parameters,
function=build_child_callback(function, parent_parameters=['x', 'parameters']),
dimension=dimension,
@@ -5019,7 +5034,8 @@ def reduce_dimension(data, reducer, dimension, context=UNSET) -> ProcessBuilder:
dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution)
for all other dimensions remain unchanged.
"""
return _process('reduce_dimension',
return _process(
"reduce_dimension",
data=data,
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
dimension=dimension,
@@ -5250,7 +5266,8 @@ def sar_backscatter(data, coefficient=UNSET, elevation_model=UNSET, mask=UNSET,
:return: Backscatter values corresponding to the chosen parametrization. The values are given in linear
scale.
"""
return _process('sar_backscatter',
return _process(
"sar_backscatter",
data=data,
coefficient=coefficient,
elevation_model=elevation_model,
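For context on the `build_child_callback` wrapping that recurs throughout this generated module: on the user side, these helpers accept a callback as a plain Python function, which `build_child_callback` turns into a child process graph. A brief sketch (the `cube` object and band indices are assumptions for illustration):

```python
from openeo.processes import array_element

# A child callback: the difference between two band values of each pixel.
# When passed as `reducer`, build_child_callback converts it to a process graph.
def band_diff(data):
    return array_element(data, index=1) - array_element(data, index=0)

# `cube` is assumed to be an existing DataCube with a "bands" dimension.
result = cube.reduce_dimension(reducer=band_diff, dimension="bands")
```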
9 changes: 9 additions & 0 deletions openeo/rest/connection.py
@@ -1240,6 +1240,7 @@ def load_collection(
] = None,
max_cloud_cover: Optional[float] = None,
fetch_metadata: bool = True,
**kwargs,
) -> DataCube:
"""
Load a DataCube by collection id.
@@ -1262,6 +1263,9 @@
:param properties: limit data by collection metadata property predicates.
See :py:func:`~openeo.rest.graph_building.collection_property` for easy construction of such predicates.
:param max_cloud_cover: shortcut to set maximum cloud cover ("eo:cloud_cover" collection property)
:param kwargs: additional backend-specific parameters to pass to ``load_collection``.
These allow leveraging backend-specific features not covered by the standard openEO API,
for example: ``nodata`` (force specific nodata value), ``target_crs`` (output CRS), etc.
:return: a datacube containing the requested data

.. versionadded:: 0.13.0
Expand All @@ -1276,6 +1280,10 @@ def load_collection(

.. versionchanged:: 0.37.0
Argument ``spatial_extent``: add support for passing a Shapely geometry or a local path to a GeoJSON file.

.. versionadded:: 0.48.0
added ``**kwargs`` for additional backend-specific parameters.

"""
return DataCube.load_collection(
collection_id=collection_id,
Expand All @@ -1286,6 +1294,7 @@ def load_collection(
properties=properties,
max_cloud_cover=max_cloud_cover,
fetch_metadata=fetch_metadata,
**kwargs,
)

# TODO: remove this #100 #134 0.4.10
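A sketch of what the new `**kwargs` pass-through enables (`nodata` and `target_crs` are the docstring's own examples; whether a given backend honors them is backend-specific):

```python
import openeo

connection = openeo.connect("https://openeo.example.org")

# Standard arguments are handled as before; unknown keyword arguments are now
# forwarded verbatim to the backend's load_collection implementation.
cube = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=["2023-01-01", "2023-02-01"],
    nodata=0,                 # backend-specific: force a specific nodata value
    target_crs="EPSG:32631",  # backend-specific: requested output CRS
)
```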
11 changes: 11 additions & 0 deletions openeo/rest/datacube.py
@@ -175,6 +175,7 @@ def load_collection(
Dict[str, Union[PGNode, typing.Callable]], List[CollectionProperty], CollectionProperty, None
] = None,
max_cloud_cover: Optional[float] = None,
**kwargs,
) -> DataCube:
"""
Create a new Raster Data cube.
@@ -199,6 +200,9 @@
:param properties: limit data by metadata property predicates.
See :py:func:`~openeo.rest.graph_building.collection_property` for easy construction of such predicates.
:param max_cloud_cover: shortcut to set maximum cloud cover ("eo:cloud_cover" collection property)
:param kwargs: additional backend-specific parameters to pass to ``load_collection``.
These allow leveraging backend-specific features not covered by the standard openEO API,
for example: ``nodata`` (force specific nodata value), ``target_crs`` (output CRS), etc.
:return: new DataCube containing the collection

.. versionchanged:: 0.13.0
@@ -213,6 +217,10 @@

.. versionchanged:: 0.37.0
Argument ``spatial_extent``: add support for passing a Shapely geometry or a local path to a GeoJSON file.

.. versionadded:: 0.48.0
added ``**kwargs`` for additional backend-specific parameters.

"""
if temporal_extent:
temporal_extent = cls._get_temporal_extent(extent=temporal_extent)
@@ -255,6 +263,9 @@
if properties is not None:
arguments["properties"] = properties

# Add any additional backend-specific parameters
arguments.update(kwargs)

pg = PGNode(
process_id='load_collection',
arguments=arguments
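Because `arguments.update(kwargs)` runs after the standard arguments are assembled, each extra keyword ends up as a sibling of `id`, `temporal_extent`, etc. in the generated `load_collection` node (and, by the same token, could overwrite a standard argument of the same name). Roughly, continuing the sketch above (a hand-written illustration, not verbatim client output):

```python
print(cube.to_json())
# {
#   "process_graph": {
#     "loadcollection1": {
#       "process_id": "load_collection",
#       "arguments": {
#         "id": "SENTINEL2_L2A",
#         "temporal_extent": ["2023-01-01", "2023-02-01"],
#         "nodata": 0,
#         "target_crs": "EPSG:32631"
#       },
#       "result": true
#     }
#   }
# }
```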
2 changes: 1 addition & 1 deletion openeo/udf/udf_signatures.py
@@ -6,8 +6,8 @@
"""
# Note: this module was initially developed under the ``openeo-udf`` project (https://github.com/Open-EO/openeo-udf)

from deprecated import deprecated
import xarray
from deprecated import deprecated
from pandas import Series

from openeo.metadata import CollectionMetadata
16 changes: 6 additions & 10 deletions tests/internal/processes/test_generator.py
@@ -272,8 +272,7 @@ def test_render_process_graph_callback_wrapping():

renderer = PythonRenderer(optional_default="UNSET")
src = renderer.render_process(process, width=80)
assert src == dedent(
'''\
assert src == dedent('''\
def apply_dimension(data, dimension, process):
"""
Apply
Expand All @@ -284,12 +283,11 @@ def apply_dimension(data, dimension, process):

:return: Data cube
"""
return _process('apply_dimension',
data=data,
dimension=dimension,
process=build_child_callback(process, parent_parameters=['data'])
)'''
)
)''')


def test_render_process_graph_optional_callback():
@@ -321,8 +319,7 @@

renderer = PythonRenderer(optional_default="UNSET")
src = renderer.render_process(process)
assert src == dedent(
'''\
assert src == dedent('''\
def apply(data, process=UNSET):
"""
Apply
Expand All @@ -332,11 +329,10 @@ def apply(data, process=UNSET):

:return: Data cube
"""
return _process('apply',
data=data,
process=(build_child_callback(process, parent_parameters=['data']) if process not in [None, UNSET] else process)
)'''
)
)''')


def test_collect_processes_basic(tmp_path, test_data):