2 changes: 1 addition & 1 deletion README.md
@@ -22,7 +22,7 @@ With minimal code, you can perform evaluations on a computing cluster, display l

Adaptive is most efficient for computations where each function evaluation takes at least ≈50ms due to the overhead of selecting potentially interesting points.

To see Adaptive in action, try the [example notebook on Binder](https://mybinder.org/v2/gh/python-adaptive/adaptive/main?filepath=example-notebook.ipynb) or explore the [tutorial on Read the Docs](https://adaptive.readthedocs.io/en/latest/tutorial/tutorial.html).
To see Adaptive in action, try the [example notebook on Binder](https://mybinder.org/v2/gh/python-adaptive/adaptive/main?filepath=example-notebook.ipynb) or explore the [tutorial on Read the Docs](https://adaptive.readthedocs.io/en/latest/tutorial/tutorial).

<!-- summary-end -->
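To illustrate the "minimal code" claim in this README hunk, here is a small sketch of the kind of workflow the linked tutorial and Binder notebook walk through. It is not taken from the README; the function, bounds, and loss goal are arbitrary placeholders, and only the standard `adaptive` API (`Learner1D`, `runner.simple`, `plot`) is assumed.

```python
import adaptive
from adaptive.runner import simple

def f(x):
    return x ** 2  # placeholder target function

learner = adaptive.Learner1D(f, bounds=(-1, 1))
simple(learner, goal=lambda l: l.loss() < 0.01)  # sequential, in-process run
learner.plot()  # requires holoviews; in a notebook this shows the adaptively sampled points
```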

24 changes: 10 additions & 14 deletions adaptive/learner/learner2D.py
@@ -33,7 +33,7 @@
# Learner2D and helper functions.


def deviations(ip: LinearNDInterpolator) -> list[np.ndarray]:
def deviations(ip: LinearNDInterpolator) -> np.ndarray:
"""Returns the deviation of the linear estimate.

Is useful when defining custom loss functions.
@@ -44,7 +44,7 @@ def deviations(ip: LinearNDInterpolator) -> list[np.ndarray]:

Returns
-------
deviations : list
deviations : numpy.ndarray
The deviation per triangle.
"""
values = ip.values / (np.ptp(ip.values, axis=0).max() or 1)
@@ -55,18 +55,14 @@ def deviations(ip: LinearNDInterpolator) -> np.ndarray:
vs = values[simplices]
gs = gradients[simplices]

def deviation(p, v, g):
dev = 0
for j in range(3):
vest = v[:, j, None] + (
(p[:, :, :] - p[:, j, None, :]) * g[:, j, None, :]
).sum(axis=-1)
dev += abs(vest - v).max(axis=1)
return dev

n_levels = vs.shape[2]
devs = [deviation(p, vs[:, :, i], gs[:, :, i]) for i in range(n_levels)]
return devs
p = np.expand_dims(p, axis=2)

p_diff = p[:, None] - p[:, :, None]
p_diff_scaled = p_diff * gs[:, :, None]
vest = vs[:, :, None] + p_diff_scaled.sum(axis=-1)
devs = np.sum(np.max(np.abs(vest - vs[:, None]), axis=2), axis=1)

return np.swapaxes(devs, 0, 1)
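As a sanity check on the broadcasting above, the following standalone NumPy sketch (not part of the diff) compares the removed per-level loop with the new vectorized expression on random toy arrays. The shapes mirror those produced by `ip.tri.points[simplices]`, `values[simplices]`, and `gradients[simplices]`; the sizes are arbitrary.

```python
import numpy as np

rng = np.random.default_rng(0)
n_tri, n_levels = 5, 3
p = rng.random((n_tri, 3, 2))             # triangle vertex coordinates
vs = rng.random((n_tri, 3, n_levels))     # values at the vertices, per output level
gs = rng.random((n_tri, 3, n_levels, 2))  # gradients at the vertices, per output level

# Removed implementation: one pass of the inner loop per output level.
def deviation(p, v, g):
    dev = 0
    for j in range(3):
        vest = v[:, j, None] + ((p - p[:, j, None, :]) * g[:, j, None, :]).sum(axis=-1)
        dev += abs(vest - v).max(axis=1)
    return dev

old = np.array([deviation(p, vs[:, :, i], gs[:, :, i]) for i in range(n_levels)])

# New implementation: broadcast over all output levels at once.
p4 = np.expand_dims(p, axis=2)  # (n_tri, 3, 1, 2)
vest = vs[:, :, None] + ((p4[:, None] - p4[:, :, None]) * gs[:, :, None]).sum(axis=-1)
new = np.swapaxes(np.abs(vest - vs[:, None]).max(axis=2).sum(axis=1), 0, 1)

assert np.allclose(old, new)  # both have shape (n_levels, n_tri)
```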


def areas(ip: LinearNDInterpolator) -> np.ndarray:
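Since the docstring notes that `deviations` is useful for defining custom loss functions, here is a hedged sketch of how the `(n_outputs, n_triangles)` array it now returns could feed a `Learner2D` loss. `area_weighted_loss` is a hypothetical name and the weighting is purely illustrative, not the library's default loss.

```python
import numpy as np

from adaptive import Learner2D
from adaptive.learner.learner2D import areas, deviations

def area_weighted_loss(ip):
    # deviations(ip) has shape (n_outputs, n_triangles); collapse the output
    # axis and weight each triangle by the square root of its area.
    dev = np.sum(deviations(ip), axis=0)
    return np.sqrt(areas(ip)) * dev

learner = Learner2D(
    lambda xy: [xy[0] + xy[1], xy[0] * xy[1]],
    bounds=[(-1, 1), (-1, 1)],
    loss_per_triangle=area_weighted_loss,
)
```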
63 changes: 63 additions & 0 deletions adaptive/tests/test_learners.py
@@ -279,6 +279,69 @@ def f(x):
simple_run(learner, 10)


def test_learner2d_vector_valued_function():
"""Test that Learner2D handles vector-valued functions correctly.

This test verifies that the deviations function works properly when
the function returns a vector (array/list) of values instead of a scalar.
"""

from adaptive import Learner2D

def vector_function(xy):
"""A 2D function that returns a 3-element vector."""
x, y = xy
return [x + y, x * y, x - y] # Returns 3-element vector

# Create learner with vector-valued function
learner = Learner2D(vector_function, bounds=[(-1, 1), (-1, 1)])

# Add some initial points
points = [
(0.0, 0.0),
(1.0, 0.0),
(0.0, 1.0),
(1.0, 1.0),
(0.5, 0.5),
(-0.5, 0.5),
(0.5, -0.5),
(-1.0, -1.0),
]

for point in points:
value = vector_function(point)
learner.tell(point, value)

# Run the learner to trigger deviations calculation
# This should not raise any errors
learner.ask(10)

# Verify that the interpolator is created (ip is a property that may return a function)
assert hasattr(learner, "ip")

# Check the internal interpolator if it exists
if hasattr(learner, "_ip") and learner._ip is not None:
# Check that values have the correct shape
assert learner._ip.values.shape[1] == 3 # 3 output dimensions

# Test that we can evaluate the interpolated function
test_point = (0.25, 0.25)
ip_func = learner.interpolator(scaled=True) # Get the interpolator function
if ip_func is not None:
interpolated_value = ip_func(test_point)
assert len(interpolated_value) == 3

# Run more iterations to ensure deviations are computed correctly
simple_run(learner, 20)

# Final verification
assert len(learner.data) > len(points) # Learner added more points

# Check that all values in data are vectors
for _point, value in learner.data.items():
assert len(value) == 3, f"Expected 3-element vector, got {value}"
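Beyond the manual `tell`/`ask` calls in the new test, a runner-driven variant would exercise the same vector-valued code path. This is a sketch, not part of the PR; the point-count goal is arbitrary.

```python
from adaptive import Learner2D
from adaptive.runner import simple

def vector_function(xy):
    x, y = xy
    return [x + y, x * y, x - y]  # same 3-component output as the test above

learner = Learner2D(vector_function, bounds=[(-1, 1), (-1, 1)])
simple(learner, goal=lambda l: l.npoints >= 50)  # arbitrary stopping point
assert all(len(v) == 3 for v in learner.data.values())
```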


@run_with(Learner1D, Learner2D, LearnerND, SequenceLearner, AverageLearner1D)
def test_adding_existing_data_is_idempotent(learner_type, f, learner_kwargs):
"""Adding already existing data is an idempotent operation.