From a0a6c8d990e224007c1de7cba7af230a0989ded8 Mon Sep 17 00:00:00 2001
From: stijn
Date: Fri, 17 Oct 2025 15:45:27 +0200
Subject: [PATCH 1/7] Add initial support for scikit-optimize minimize methods (`skopt`)

---
 kernel_tuner/interface.py         |  5 +++
 kernel_tuner/strategies/common.py |  4 +-
 kernel_tuner/strategies/skopt.py  | 72 +++++++++++++++++++++++++++++++
 3 files changed, 80 insertions(+), 1 deletion(-)
 create mode 100644 kernel_tuner/strategies/skopt.py

diff --git a/kernel_tuner/interface.py b/kernel_tuner/interface.py
index 32e91c86..f6dca4cc 100644
--- a/kernel_tuner/interface.py
+++ b/kernel_tuner/interface.py
@@ -65,6 +65,7 @@
     pyatf_strategies,
     random_sample,
     simulated_annealing,
+    skopt
 )
 from kernel_tuner.strategies.wrapper import OptAlgWrapper

@@ -82,6 +83,7 @@
     "mls": mls,
     "pso": pso,
     "simulated_annealing": simulated_annealing,
+    "skopt": skopt,
     "firefly_algorithm": firefly_algorithm,
     "bayes_opt": bayes_opt,
     "pyatf_strategies": pyatf_strategies,
@@ -394,6 +396,7 @@ def __deepcopy__(self, _):
      * "pso" particle swarm optimization
      * "random_sample" takes a random sample of the search space
      * "simulated_annealing" simulated annealing strategy
+     * "skopt" uses the minimization methods from `skopt`

     Strategy-specific parameters and options are explained under strategy_options.

@@ -594,6 +597,7 @@ def tune_kernel(
     kernelsource = core.KernelSource(kernel_name, kernel_source, lang, defines)

+    print("block_size_names", block_size_names)
     _check_user_input(kernel_name, kernelsource, arguments, block_size_names)

     # default objective if none is specified
@@ -676,6 +680,7 @@ def preprocess_cache(filepath):

     # create search space
     tuning_options.restrictions_unmodified = deepcopy(restrictions)
+    print(searchspace_construction_options)
     searchspace = Searchspace(tune_params, restrictions, runner.dev.max_threads, **searchspace_construction_options)
     restrictions = searchspace._modified_restrictions
     tuning_options.restrictions = restrictions

diff --git a/kernel_tuner/strategies/common.py b/kernel_tuner/strategies/common.py
index 9ffe999b..b51274ce 100644
--- a/kernel_tuner/strategies/common.py
+++ b/kernel_tuner/strategies/common.py
@@ -73,6 +73,7 @@ def __init__(
         snap=True,
         return_invalid=False,
         return_raw=None,
+        invalid_value=sys.float_info.max,
     ):
         """An abstract method to handle evaluation of configurations.
@@ -100,6 +101,7 @@ def __init__(
         self.return_raw = f"{tuning_options['objective']}s"
         self.results = []
         self.budget_spent_fraction = 0.0
+        self.invalid_return_value = invalid_value

     def __call__(self, x, check_restrictions=True):
@@ -168,7 +170,7 @@ def __call__(self, x, check_restrictions=True):
             else:
                 # this is not a valid configuration, replace with float max if needed
                 if not self.return_invalid:
-                    return_value = sys.float_info.max
+                    return_value = self.invalid_return_value

         # include raw data in return if requested
         if self.return_raw is not None:

diff --git a/kernel_tuner/strategies/skopt.py b/kernel_tuner/strategies/skopt.py
new file mode 100644
index 00000000..07d79bac
--- /dev/null
+++ b/kernel_tuner/strategies/skopt.py
@@ -0,0 +1,72 @@
+"""The strategy that uses a minimizer method for searching through the parameter space."""
+
+from kernel_tuner.util import StopCriterionReached
+from kernel_tuner.searchspace import Searchspace
+from kernel_tuner.strategies.common import (
+    CostFunc,
+    get_options,
+    scale_from_params,
+    get_strategy_docstring,
+)
+
+supported_methods = ["forest", "gbrt", "gp", "dummy"]
+
+_options = dict(
+    method=(f"Optimization algorithm to use, choose any from {supported_methods}", "gp"),
+    options=("Options passed to the skopt method as kwargs.", dict()),
+    popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
+    maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
+)
+
+def tune(searchspace: Searchspace, runner, tuning_options):
+    import skopt
+
+    cost_func = CostFunc(searchspace, tuning_options, runner, scaling=True, invalid_value=1e9)
+    bounds, _, eps = cost_func.get_bounds_x0_eps()
+
+    method, skopt_options, popsize, maxiter = get_options(tuning_options.strategy_options, _options)
+
+    # Get maximum number of evaluations
+    max_fevals = searchspace.size
+    if "max_fevals" in tuning_options:
+        max_fevals = min(tuning_options["max_fevals"], max_fevals)
+
+    # Set the maximum number of calls to 100 times the maximum number of evaluations.
+    # Not all calls by skopt will result in an evaluation, due to restrictions or
+    # since different calls might map to the same configuration.
+    if "n_calls" not in skopt_options:
+        skopt_options["n_calls"] = 100 * max_fevals
+
+    # If the initial population size is specified, we select `popsize` samples
+    # from the search space. This is more efficient than letting skopt select
+    # the samples as it is not aware of restrictions.
+    if popsize:
+        x0 = searchspace.get_random_sample(min(popsize, max_fevals))
+        skopt_options["x0"] = [list(scale_from_params(x, searchspace.tune_params, eps)) for x in x0]
+
+
+    opt_result = None
+
+    try:
+        for _ in range(maxiter):
+            if method == "dummy":
+                opt_result = skopt.dummy_minimize(cost_func, bounds, **skopt_options)
+            elif method == "forest":
+                opt_result = skopt.forest_minimize(cost_func, bounds, **skopt_options)
+            elif method == "gp":
+                opt_result = skopt.gp_minimize(cost_func, bounds, **skopt_options)
+            elif method == "gbrt":
+                opt_result = skopt.gbrt_minimize(cost_func, bounds, **skopt_options)
+            else:
+                raise ValueError(f"invalid skopt method: {method}")
+    except StopCriterionReached as e:
+        if tuning_options.verbose:
+            print(e)
+
+    if opt_result and tuning_options.verbose:
+        print(opt_result.message)
+
+    return cost_func.results
+
+
+tune.__doc__ = get_strategy_docstring("skopt minimize", _options)

From 632df23ac8b6bde4bdedf2d8d73654585d01d0a0 Mon Sep 17 00:00:00 2001
From: stijn
Date: Thu, 23 Oct 2025 13:49:29 +0200
Subject: [PATCH 2/7] Add `space_constraint` option for `skopt` strategy

---
 kernel_tuner/strategies/skopt.py | 40 ++++++++++++++++++--------------
 1 file changed, 23 insertions(+), 17 deletions(-)

diff --git a/kernel_tuner/strategies/skopt.py b/kernel_tuner/strategies/skopt.py
index 07d79bac..20c83afe 100644
--- a/kernel_tuner/strategies/skopt.py
+++ b/kernel_tuner/strategies/skopt.py
@@ -5,25 +5,23 @@
 from kernel_tuner.strategies.common import (
     CostFunc,
     get_options,
-    scale_from_params,
+    snap_to_nearest_config,
     get_strategy_docstring,
 )

 supported_methods = ["forest", "gbrt", "gp", "dummy"]

 _options = dict(
-    method=(f"Optimization algorithm to use, choose any from {supported_methods}", "gp"),
-    options=("Options passed to the skopt method as kwargs.", dict()),
-    popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
-    maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
+    method=(f"Optimization algorithm to use, choose any from {supported_methods}", "gp"),
+    options=("Options passed to the skopt method as kwargs.", dict()),
+    popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
+    maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
 )

+
 def tune(searchspace: Searchspace, runner, tuning_options):
     import skopt

-    cost_func = CostFunc(searchspace, tuning_options, runner, scaling=True, invalid_value=1e9)
-    bounds, _, eps = cost_func.get_bounds_x0_eps()
-
     method, skopt_options, popsize, maxiter = get_options(tuning_options.strategy_options, _options)

     # Get maximum number of evaluations
@@ -32,8 +30,8 @@ def tune(searchspace: Searchspace, runner, tuning_options):
         max_fevals = min(tuning_options["max_fevals"], max_fevals)

     # Set the maximum number of calls to 100 times the maximum number of evaluations.
-    # Not all calls by skopt will result in an evaluation, due to restrictions or
-    # since different calls might map to the same configuration.
+    # Not all calls by skopt will result in an evaluation since different calls might
+    # map to the same configuration.
     if "n_calls" not in skopt_options:
         skopt_options["n_calls"] = 100 * max_fevals

@@ -42,21 +40,29 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     # the samples as it is not aware of restrictions.
     if popsize:
         x0 = searchspace.get_random_sample(min(popsize, max_fevals))
-        skopt_options["x0"] = [list(scale_from_params(x, searchspace.tune_params, eps)) for x in x0]
-
+        skopt_options["x0"] = [searchspace.get_param_indices(x) for x in x0]

     opt_result = None
+    tune_params_values = list(searchspace.tune_params.values())
+    bounds = [(0, len(p) - 1) if len(p) > 1 else [0] for p in tune_params_values]
+
+    cost_func = CostFunc(searchspace, tuning_options, runner)
+    objective = lambda x: cost_func(searchspace.get_param_config_from_param_indices(x))
+    space_constraint = lambda x: searchspace.is_param_config_valid(searchspace.get_param_config_from_param_indices(x))
+
+    skopt_options["space_constraint"] = space_constraint
+    skopt_options["verbose"] = tuning_options.verbose

     try:
         for _ in range(maxiter):
             if method == "dummy":
-                opt_result = skopt.dummy_minimize(cost_func, bounds, **skopt_options)
+                opt_result = skopt.dummy_minimize(objective, bounds, **skopt_options)
             elif method == "forest":
-                opt_result = skopt.forest_minimize(cost_func, bounds, **skopt_options)
+                opt_result = skopt.forest_minimize(objective, bounds, **skopt_options)
             elif method == "gp":
-                opt_result = skopt.gp_minimize(cost_func, bounds, **skopt_options)
+                opt_result = skopt.gp_minimize(objective, bounds, **skopt_options)
             elif method == "gbrt":
-                opt_result = skopt.gbrt_minimize(cost_func, bounds, **skopt_options)
+                opt_result = skopt.gbrt_minimize(objective, bounds, **skopt_options)
             else:
                 raise ValueError(f"invalid skopt method: {method}")
     except StopCriterionReached as e:
@@ -64,7 +70,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
             print(e)

     if opt_result and tuning_options.verbose:
-        print(opt_result.message)
+        print(opt_result)

     return cost_func.results

From 2e4c67c79060a4e7bd4f60c1560594ae77dc2a9d Mon Sep 17 00:00:00 2001
From: stijn
Date: Thu, 23 Oct 2025 13:54:48 +0200
Subject: [PATCH 3/7] Add `scikit-optimize` as testing dependency

---
 kernel_tuner/interface.py | 2 --
 pyproject.toml            | 1 +
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/kernel_tuner/interface.py b/kernel_tuner/interface.py
index f6dca4cc..0641eb7e 100644
--- a/kernel_tuner/interface.py
+++ b/kernel_tuner/interface.py
@@ -597,7 +597,6 @@ def tune_kernel(
     kernelsource = core.KernelSource(kernel_name, kernel_source, lang, defines)

-    print("block_size_names", block_size_names)
     _check_user_input(kernel_name, kernelsource, arguments, block_size_names)

     # default objective if none is specified
@@ -680,7 +679,6 @@ def preprocess_cache(filepath):

     # create search space
     tuning_options.restrictions_unmodified = deepcopy(restrictions)
-    print(searchspace_construction_options)
     searchspace = Searchspace(tune_params, restrictions, runner.dev.max_threads, **searchspace_construction_options)
     restrictions = searchspace._modified_restrictions
     tuning_options.restrictions = restrictions

diff --git a/pyproject.toml b/pyproject.toml
index ffc0583b..6afd9a04 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -128,6 +128,7 @@ nox-poetry = "^1.0.3"
 ruff = "^0.4.8"
 pep440 = "^0.1.2"
 tomli = "^2.0.1" # held back by Python <= 3.10, can be replaced by built-in [tomllib](https://docs.python.org/3.11/library/tomllib.html) from Python 3.11 onwards
+scikit-optimize = "0.10"
 # development dependencies are unused for now, as this is already covered by test and docs
 #
 # ATTENTION: if anything is changed here, run `poetry update`

From 885fe09f2e2ee94312612bd6c9810900eda41308 Mon Sep 17 00:00:00 2001
From: stijn
Date: Mon, 8 Dec 2025 14:46:44 +0100
Subject: [PATCH 4/7] Use `skopt.Optimizer` object API instead of the
 `skopt.gp_minimize` function

---
 kernel_tuner/strategies/skopt.py | 116 ++++++++++++++++++-------------
 1 file changed, 69 insertions(+), 47 deletions(-)

diff --git a/kernel_tuner/strategies/skopt.py b/kernel_tuner/strategies/skopt.py
index 20c83afe..8dd530f4 100644
--- a/kernel_tuner/strategies/skopt.py
+++ b/kernel_tuner/strategies/skopt.py
@@ -1,76 +1,98 @@
-"""The strategy that uses a minimizer method for searching through the parameter space."""
+"""The strategy that uses the optimizer from skopt for searching through the parameter space."""

+import numpy as np
 from kernel_tuner.util import StopCriterionReached
 from kernel_tuner.searchspace import Searchspace
 from kernel_tuner.strategies.common import (
     CostFunc,
     get_options,
-    snap_to_nearest_config,
     get_strategy_docstring,
 )

-supported_methods = ["forest", "gbrt", "gp", "dummy"]
+supported_learners = ["RF", "ET", "GBRT", "DUMMY", "GP"]
+supported_acq = ["LCB", "EI", "PI", "gp_hedge"]
+supported_liars = ["cl_min", "cl_mean", "cl_max"]

 _options = dict(
-    method=(f"Optimization algorithm to use, choose any from {supported_methods}", "gp"),
-    options=("Options passed to the skopt method as kwargs.", dict()),
-    popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
-    maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
+    learner=(f"The learner to use (supported: {supported_learners})", "RF"),
+    acq_func=(f"The acquisition function to use (supported: {supported_acq})", "gp_hedge"),
+    lie_strategy=(f"The lie strategy to use when asking for batches (supported: {supported_liars})", "cl_max"),
+    kappa=("The kappa value of the acquisition function, which trades off exploration and exploitation", 1.96),
+    num_initial=("Number of initial samples. If `None`, let skopt choose the initial population", None),
+    batch_size=("The number of points to ask per batch", 1),
+    skopt_kwargs=("Additional options passed to the skopt `Optimizer` as kwargs.", dict()),
 )


 def tune(searchspace: Searchspace, runner, tuning_options):
-    import skopt
-
-    method, skopt_options, popsize, maxiter = get_options(tuning_options.strategy_options, _options)
+    learner, acq_func, lie_strategy, kappa, num_initial, batch_size, skopt_kwargs = \
+        get_options(tuning_options.strategy_options, _options)

     # Get maximum number of evaluations
-    max_fevals = searchspace.size
-    if "max_fevals" in tuning_options:
-        max_fevals = min(tuning_options["max_fevals"], max_fevals)
-
-    # Set the maximum number of calls to 100 times the maximum number of evaluations.
-    # Not all calls by skopt will result in an evaluation since different calls might
-    # map to the same configuration.
-    if "n_calls" not in skopt_options:
-        skopt_options["n_calls"] = 100 * max_fevals
-
-    # If the initial population size is specified, we select `popsize` samples
-    # from the search space. This is more efficient than letting skopt select
-    # the samples as it is not aware of restrictions.
-    if popsize:
-        x0 = searchspace.get_random_sample(min(popsize, max_fevals))
-        skopt_options["x0"] = [searchspace.get_param_indices(x) for x in x0]
+    max_fevals = min(tuning_options.get("max_fevals", np.inf), searchspace.size)

-    opt_result = None
-    tune_params_values = list(searchspace.tune_params.values())
-    bounds = [(0, len(p) - 1) if len(p) > 1 else [0] for p in tune_params_values]
-
+    # Cost function
     cost_func = CostFunc(searchspace, tuning_options, runner)
-    objective = lambda x: cost_func(searchspace.get_param_config_from_param_indices(x))
-    space_constraint = lambda x: searchspace.is_param_config_valid(searchspace.get_param_config_from_param_indices(x))
+    opt_config, opt_result = None, None

-    skopt_options["space_constraint"] = space_constraint
-    skopt_options["verbose"] = tuning_options.verbose
+    # The dimensions. Parameters with one value become categorical
+    from skopt.space.space import Categorical, Integer
+    tune_params_values = list(searchspace.tune_params.values())
+    bounds = [Integer(0, len(p) - 1) if len(p) > 1 else Categorical([0]) for p in tune_params_values]
+
+    # Space constraint
+    space_constraint = lambda x: searchspace.is_param_config_valid(
+        searchspace.get_param_config_from_param_indices(x))
+
+    # Create skopt optimizer
+    skopt_kwargs = dict(skopt_kwargs)
+    skopt_kwargs.setdefault("acq_func_kwargs", {})["kappa"] = kappa
+
+    from skopt import Optimizer as SkOptimizer
+    optimizer = SkOptimizer(
+        dimensions=bounds,
+        base_estimator=learner,
+        n_initial_points=num_initial,
+        acq_func=acq_func,
+        space_constraint=space_constraint,
+        **skopt_kwargs
+    )
+
+    # Ask initial batch of configs
+    num_initial = optimizer._n_initial_points
+    batch = optimizer.ask(num_initial, lie_strategy)
+    Xs, Ys = [], []
+    eval_count = 0
+
+    if tuning_options.verbose:
+        print(f"Asked optimizer for {num_initial} points: {batch}")

     try:
-        for _ in range(maxiter):
-            if method == "dummy":
-                opt_result = skopt.dummy_minimize(objective, bounds, **skopt_options)
-            elif method == "forest":
-                opt_result = skopt.forest_minimize(objective, bounds, **skopt_options)
-            elif method == "gp":
-                opt_result = skopt.gp_minimize(objective, bounds, **skopt_options)
-            elif method == "gbrt":
-                opt_result = skopt.gbrt_minimize(objective, bounds, **skopt_options)
-            else:
-                raise ValueError(f"invalid skopt method: {method}")
+        while eval_count < max_fevals:
+            if not batch:
+                optimizer.tell(Xs, Ys)
+                batch = optimizer.ask(batch_size, lie_strategy)
+                Xs, Ys = [], []
+
+                if tuning_options.verbose:
+                    print(f"Asked optimizer for {batch_size} points: {batch}")
+
+            x = batch.pop(0)
+            y = cost_func(searchspace.get_param_config_from_param_indices(x))
+            eval_count += 1
+
+            Xs.append(x)
+            Ys.append(y)
+
+            if opt_result is None or y < opt_result:
+                opt_config, opt_result = x, y
+
     except StopCriterionReached as e:
         if tuning_options.verbose:
             print(e)

-    if opt_result and tuning_options.verbose:
-        print(opt_result)
+    if opt_result is not None and tuning_options.verbose:
+        print(f"Best configuration: {opt_result}")

     return cost_func.results

From d451a2a0711feddd2e50721a6878d42c7d06c04a Mon Sep 17 00:00:00 2001
From: stijn
Date: Mon, 8 Dec 2025 15:04:37 +0100
Subject: [PATCH 5/7] Update scikit-optimize to 0.10.2

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 6afd9a04..47040753 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -128,7 +128,7 @@ nox-poetry = "^1.0.3"
 ruff = "^0.4.8"
 pep440 = "^0.1.2"
 tomli = "^2.0.1" # held back by Python <= 3.10, can be replaced by built-in [tomllib](https://docs.python.org/3.11/library/tomllib.html) from Python 3.11 onwards
-scikit-optimize = "0.10"
+scikit-optimize = "0.10.2"
 # development dependencies are unused for now, as this is already covered by test and docs
 #
 # ATTENTION: if anything is changed here, run `poetry update`

From c05ed93bc5d90458b1593ff13be57e97fc709442 Mon Sep 17 00:00:00 2001
From: stijn
Date: Mon, 8 Dec 2025 15:10:32 +0100
Subject: [PATCH 6/7] Change how `n_initial_points` is passed to `skopt.Optimizer`

---
 kernel_tuner/strategies/skopt.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/kernel_tuner/strategies/skopt.py b/kernel_tuner/strategies/skopt.py
index 8dd530f4..f6c6578b 100644
--- a/kernel_tuner/strategies/skopt.py
+++ b/kernel_tuner/strategies/skopt.py
@@ -46,14 +46,23 @@ def tune(searchspace: Searchspace, runner, tuning_options):

     # Create skopt optimizer
     skopt_kwargs = dict(skopt_kwargs)
-    skopt_kwargs.setdefault("acq_func_kwargs", {})["kappa"] = kappa
+    skopt_kwargs["base_estimator"] = learner
+    skopt_kwargs["acq_func"] = acq_func
+
+    # Only set n_initial_points if not None
+    if num_initial is not None:
+        skopt_kwargs["n_initial_points"] = num_initial
+
+    # Set kappa if not None
+    if kappa is not None:
+        skopt_kwargs.setdefault("acq_func_kwargs", {})["kappa"] = kappa
+
+    if tuning_options.verbose:
+        print(f"Initialize scikit-optimize Optimizer object: {skopt_kwargs}")

     from skopt import Optimizer as SkOptimizer
     optimizer = SkOptimizer(
         dimensions=bounds,
-        base_estimator=learner,
-        n_initial_points=num_initial,
-        acq_func=acq_func,
         space_constraint=space_constraint,
         **skopt_kwargs
     )

From d7d47ed957cb188c6ccfe8505f15419a4722257b Mon Sep 17 00:00:00 2001
From: stijn
Date: Mon, 8 Dec 2025 15:22:52 +0100
Subject: [PATCH 7/7] Add support for `x0` with `skopt` search strategy

---
 kernel_tuner/strategies/skopt.py | 21 ++++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/kernel_tuner/strategies/skopt.py b/kernel_tuner/strategies/skopt.py
index f6c6578b..977a47d5 100644
--- a/kernel_tuner/strategies/skopt.py
+++ b/kernel_tuner/strategies/skopt.py
@@ -32,7 +32,6 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     max_fevals = min(tuning_options.get("max_fevals", np.inf), searchspace.size)

     # Cost function
-    cost_func = CostFunc(searchspace, tuning_options, runner)
     opt_config, opt_result = None, None

     # The dimensions. Parameters with one value become categorical
@@ -70,18 +69,26 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     # Ask initial batch of configs
     num_initial = optimizer._n_initial_points
     batch = optimizer.ask(num_initial, lie_strategy)
-    Xs, Ys = [], []
+    xs, ys = [], []
     eval_count = 0

     if tuning_options.verbose:
         print(f"Asked optimizer for {num_initial} points: {batch}")

+    # Create cost function
+    cost_func = CostFunc(searchspace, tuning_options, runner)
+    x0 = cost_func.get_start_pos()
+
+    # Add x0 if the user has requested it
+    if x0 is not None:
+        batch.insert(0, searchspace.get_param_indices(x0))
+
     try:
         while eval_count < max_fevals:
             if not batch:
-                optimizer.tell(Xs, Ys)
+                optimizer.tell(xs, ys)
                 batch = optimizer.ask(batch_size, lie_strategy)
-                Xs, Ys = [], []
+                xs, ys = [], []

                 if tuning_options.verbose:
                     print(f"Asked optimizer for {batch_size} points: {batch}")
@@ -90,8 +97,8 @@ def tune(searchspace: Searchspace, runner, tuning_options):
             x = batch.pop(0)
             y = cost_func(searchspace.get_param_config_from_param_indices(x))
             eval_count += 1

-            Xs.append(x)
-            Ys.append(y)
+            xs.append(x)
+            ys.append(y)

             if opt_result is None or y < opt_result:
                 opt_config, opt_result = x, y
@@ -101,7 +108,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
             print(e)

     if opt_result is not None and tuning_options.verbose:
-        print(f"Best configuration: {opt_result}")
+        print(f"Best configuration: {opt_config}")

     return cost_func.results
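
Usage sketch (illustrative, not part of the patch series): with these patches applied, the new strategy is selected by passing strategy="skopt" to `tune_kernel`, and the options defined in `_options` (plus `max_fevals` and the `x0` support from patch 7) are forwarded through `strategy_options`. The vector-add kernel and all concrete option values below are hypothetical; only the strategy name and option keys come from the patches above.

import numpy as np
from kernel_tuner import tune_kernel

# Hypothetical CUDA kernel to tune
kernel_string = """
__global__ void vector_add(float *c, const float *a, const float *b, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        c[i] = a[i] + b[i];
    }
}
"""

size = 10000000
a = np.random.randn(size).astype(np.float32)
b = np.random.randn(size).astype(np.float32)
c = np.zeros_like(a)
n = np.int32(size)

tune_params = {"block_size_x": [32, 64, 128, 256, 512, 1024]}

results, env = tune_kernel(
    "vector_add", kernel_string, size, [c, a, b, n], tune_params,
    strategy="skopt",
    strategy_options=dict(
        learner="GP",      # surrogate model, see supported_learners
        acq_func="EI",     # acquisition function, see supported_acq
        batch_size=4,      # points requested per ask/tell round
        max_fevals=50,     # evaluation budget
        x0=[128],          # optional starting configuration (patch 7)
    ),
)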