From 57a5df6ce34bbbe0ad710b61f07aa0a321f792ac Mon Sep 17 00:00:00 2001
From: krokosik
Date: Tue, 29 Jul 2025 15:29:12 +0200
Subject: [PATCH 1/3] Remove for loops in deviations function body

---
 adaptive/learner/learner2D.py | 22 +++++++++-------------
 1 file changed, 9 insertions(+), 13 deletions(-)

diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py
index cb179a22..ea1e82af 100644
--- a/adaptive/learner/learner2D.py
+++ b/adaptive/learner/learner2D.py
@@ -33,7 +33,7 @@
 # Learner2D and helper functions.
 
 
-def deviations(ip: LinearNDInterpolator) -> list[np.ndarray]:
+def deviations(ip: LinearNDInterpolator) -> np.ndarray:
     """Returns the deviation of the linear estimate.
 
     Is useful when defining custom loss functions.
@@ -55,18 +55,14 @@ def deviations(ip: LinearNDInterpolator) -> list[np.ndarray]:
     vs = values[simplices]
     gs = gradients[simplices]
 
-    def deviation(p, v, g):
-        dev = 0
-        for j in range(3):
-            vest = v[:, j, None] + (
-                (p[:, :, :] - p[:, j, None, :]) * g[:, j, None, :]
-            ).sum(axis=-1)
-            dev += abs(vest - v).max(axis=1)
-        return dev
-
-    n_levels = vs.shape[2]
-    devs = [deviation(p, vs[:, :, i], gs[:, :, i]) for i in range(n_levels)]
-    return devs
+    p = np.expand_dims(p, axis=2)
+
+    p_diff = p[:, None] - p[:, :, None]
+    p_diff_scaled = p_diff * gs[:, :, None]
+    vest = vs[:, :, None] + p_diff_scaled.sum(axis=-1)
+    devs = np.sum(np.max(np.abs(vest - vs[:, None]), axis=2), axis=1)
+
+    return np.swapaxes(devs, 0, 1)
 
 
 def areas(ip: LinearNDInterpolator) -> np.ndarray:

From 5f47fcd962f196f047c3f4cd3efc9e426ed19aa8 Mon Sep 17 00:00:00 2001
From: krokosik
Date: Tue, 29 Jul 2025 15:32:03 +0200
Subject: [PATCH 2/3] Fix tutorial link in readme

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index dcfd4ff8..e3a83a68 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ With minimal code, you can perform evaluations on a computing cluster, display l
 
 Adaptive is most efficient for computations where each function evaluation takes at least ≈50ms due to the overhead of selecting potentially interesting points.
 
-To see Adaptive in action, try the [example notebook on Binder](https://mybinder.org/v2/gh/python-adaptive/adaptive/main?filepath=example-notebook.ipynb) or explore the [tutorial on Read the Docs](https://adaptive.readthedocs.io/en/latest/tutorial/tutorial.html).
+To see Adaptive in action, try the [example notebook on Binder](https://mybinder.org/v2/gh/python-adaptive/adaptive/main?filepath=example-notebook.ipynb) or explore the [tutorial on Read the Docs](https://adaptive.readthedocs.io/en/latest/tutorial/tutorial).
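A quick way to sanity-check the rewrite in PATCH 1/3 is to compare the broadcasted computation against the removed loop-based one on random inputs. The sketch below is not part of the patch series: the helper names are made up for illustration, and the array shapes, (n_simplices, 3, 2) for the triangle vertices, (n_simplices, 3, n_levels) for the per-vertex values, and (n_simplices, 3, n_levels, 2) for the per-vertex gradients, are assumed to match what learner2D.py obtains from scipy's estimate_gradients_2d_global.

import numpy as np


def deviations_loop(p, vs, gs):
    # Reference implementation mirroring the removed per-level loop.
    def deviation(p, v, g):
        dev = 0
        for j in range(3):
            vest = v[:, j, None] + (
                (p[:, :, :] - p[:, j, None, :]) * g[:, j, None, :]
            ).sum(axis=-1)
            dev += abs(vest - v).max(axis=1)
        return dev

    n_levels = vs.shape[2]
    return np.array([deviation(p, vs[:, :, i], gs[:, :, i]) for i in range(n_levels)])


def deviations_vectorized(p, vs, gs):
    # Broadcasted computation from PATCH 1/3, with the intermediate shapes spelled out.
    p = np.expand_dims(p, axis=2)                        # (n_simplices, 3, 1, 2)
    p_diff = p[:, None] - p[:, :, None]                  # (n_simplices, 3, 3, 1, 2)
    p_diff_scaled = p_diff * gs[:, :, None]              # (n_simplices, 3, 3, n_levels, 2)
    vest = vs[:, :, None] + p_diff_scaled.sum(axis=-1)   # (n_simplices, 3, 3, n_levels)
    devs = np.sum(np.max(np.abs(vest - vs[:, None]), axis=2), axis=1)  # (n_simplices, n_levels)
    return np.swapaxes(devs, 0, 1)                       # (n_levels, n_simplices)


# Compare both versions on random triangles, values, and gradients.
rng = np.random.default_rng(0)
n_simplices, n_levels = 7, 3
p = rng.normal(size=(n_simplices, 3, 2))
vs = rng.normal(size=(n_simplices, 3, n_levels))
gs = rng.normal(size=(n_simplices, 3, n_levels, 2))
assert np.allclose(deviations_loop(p, vs, gs), deviations_vectorized(p, vs, gs))

Under these assumptions the new return value has shape (n_levels, n_simplices), so code that previously iterated over the returned list of per-level arrays sees the same per-level arrays when iterating over the returned ndarray.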
From 9a75cf511cdbfe539069f752abb6711cff011483 Mon Sep 17 00:00:00 2001
From: Bas Nijholt
Date: Tue, 19 Aug 2025 15:42:39 -0700
Subject: [PATCH 3/3] Add test for Learner2D with vector-valued functions

- Add comprehensive test case for vector-valued functions in test_learners.py
- Update deviations function docstring to reflect correct return type (numpy.ndarray)
- Test verifies that deviations calculation works correctly with multi-dimensional outputs
- Addresses review comment from @akhmerov about missing test coverage
---
 adaptive/learner/learner2D.py   |  2 +-
 adaptive/tests/test_learners.py | 63 +++++++++++++++++++++++++++++++++
 2 files changed, 64 insertions(+), 1 deletion(-)

diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py
index ea1e82af..4d738a71 100644
--- a/adaptive/learner/learner2D.py
+++ b/adaptive/learner/learner2D.py
@@ -44,7 +44,7 @@ def deviations(ip: LinearNDInterpolator) -> np.ndarray:
 
     Returns
     -------
-    deviations : list
+    deviations : numpy.ndarray
         The deviation per triangle.
     """
     values = ip.values / (np.ptp(ip.values, axis=0).max() or 1)
diff --git a/adaptive/tests/test_learners.py b/adaptive/tests/test_learners.py
index 16187871..dbd5c63d 100644
--- a/adaptive/tests/test_learners.py
+++ b/adaptive/tests/test_learners.py
@@ -279,6 +279,69 @@ def f(x):
     simple_run(learner, 10)
 
 
+def test_learner2d_vector_valued_function():
+    """Test that Learner2D handles vector-valued functions correctly.
+
+    This test verifies that the deviations function works properly when
+    the function returns a vector (array/list) of values instead of a scalar.
+    """
+
+    from adaptive import Learner2D
+
+    def vector_function(xy):
+        """A 2D function that returns a 3-element vector."""
+        x, y = xy
+        return [x + y, x * y, x - y]  # Returns 3-element vector
+
+    # Create learner with vector-valued function
+    learner = Learner2D(vector_function, bounds=[(-1, 1), (-1, 1)])
+
+    # Add some initial points
+    points = [
+        (0.0, 0.0),
+        (1.0, 0.0),
+        (0.0, 1.0),
+        (1.0, 1.0),
+        (0.5, 0.5),
+        (-0.5, 0.5),
+        (0.5, -0.5),
+        (-1.0, -1.0),
+    ]
+
+    for point in points:
+        value = vector_function(point)
+        learner.tell(point, value)
+
+    # Run the learner to trigger deviations calculation
+    # This should not raise any errors
+    learner.ask(10)
+
+    # Verify that the interpolator is created (ip is a property that may return a function)
+    assert hasattr(learner, "ip")
+
+    # Check the internal interpolator if it exists
+    if hasattr(learner, "_ip") and learner._ip is not None:
+        # Check that values have the correct shape
+        assert learner._ip.values.shape[1] == 3  # 3 output dimensions
+
+    # Test that we can evaluate the interpolated function
+    test_point = (0.25, 0.25)
+    ip_func = learner.interpolator(scaled=True)  # Get the interpolator function
+    if ip_func is not None:
+        interpolated_value = ip_func(test_point)
+        assert len(interpolated_value) == 3
+
+    # Run more iterations to ensure deviations are computed correctly
+    simple_run(learner, 20)
+
+    # Final verification
+    assert len(learner.data) > len(points)  # Learner added more points
+
+    # Check that all values in data are vectors
+    for _point, value in learner.data.items():
+        assert len(value) == 3, f"Expected 3-element vector, got {value}"
+
+
 @run_with(Learner1D, Learner2D, LearnerND, SequenceLearner, AverageLearner1D)
 def test_adding_existing_data_is_idempotent(learner_type, f, learner_kwargs):
     """Adding already existing data is an idempotent operation.