From 809b297c0785f656aac3647fecdd0d552e22478f Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 26 Sep 2025 10:29:00 +0200 Subject: [PATCH 01/27] Rename effect domains --- .../example_calculation_types.py | 4 +- flixopt/calculation.py | 4 +- flixopt/effects.py | 92 ++++++------ flixopt/elements.py | 2 +- flixopt/features.py | 24 ++- tests/test_effect.py | 141 +++++++++--------- tests/test_functional.py | 6 +- tests/test_integration.py | 6 +- 8 files changed, 137 insertions(+), 142 deletions(-) diff --git a/examples/03_Calculation_types/example_calculation_types.py b/examples/03_Calculation_types/example_calculation_types.py index a92a20163..3413ddc09 100644 --- a/examples/03_Calculation_types/example_calculation_types.py +++ b/examples/03_Calculation_types/example_calculation_types.py @@ -204,14 +204,14 @@ def get_solutions(calcs: list, variable: str) -> xr.Dataset: ).write_html('results/BHKW2 Thermal Power.html') fx.plotting.with_plotly( - get_solutions(calculations, 'costs(operation)|total_per_timestep').to_dataframe(), + get_solutions(calculations, 'costs(temporal)|per_timestep').to_dataframe(), mode='line', title='Operation Cost Comparison', ylabel='Costs [€]', ).write_html('results/Operation Costs.html') fx.plotting.with_plotly( - pd.DataFrame(get_solutions(calculations, 'costs(operation)|total_per_timestep').to_dataframe().sum()).T, + pd.DataFrame(get_solutions(calculations, 'costs(temporal)|per_timestep').to_dataframe().sum()).T, mode='bar', title='Total Cost Comparison', ylabel='Costs [€]', diff --git a/flixopt/calculation.py b/flixopt/calculation.py index c912b083b..6ab4c7cbd 100644 --- a/flixopt/calculation.py +++ b/flixopt/calculation.py @@ -80,8 +80,8 @@ def main_results(self) -> dict[str, Scalar | dict]: 'Penalty': float(self.model.effects.penalty.total.solution.values), 'Effects': { f'{effect.label} [{effect.unit}]': { - 'operation': float(effect.model.operation.total.solution.values), - 'invest': float(effect.model.invest.total.solution.values), + 'temporal': float(effect.model.temporal.total.solution.values), + 'nontemporal': float(effect.model.nontemporal.total.solution.values), 'total': float(effect.model.total.solution.values), } for effect in self.flow_system.effects diff --git a/flixopt/effects.py b/flixopt/effects.py index 31c941e11..663334638 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -142,12 +142,12 @@ def __init__( is_objective: bool = False, specific_share_to_other_effects_operation: EffectValuesUser | None = None, specific_share_to_other_effects_invest: EffectValuesUser | None = None, - minimum_operation: Scalar | None = None, - maximum_operation: Scalar | None = None, - minimum_invest: Scalar | None = None, - maximum_invest: Scalar | None = None, - minimum_operation_per_hour: NumericDataTS | None = None, - maximum_operation_per_hour: NumericDataTS | None = None, + minimum_temporal: Scalar | None = None, + maximum_temporal: Scalar | None = None, + minimum_nontemporal: Scalar | None = None, + maximum_nontemporal: Scalar | None = None, + minimum_temporal_per_hour: NumericDataTS | None = None, + maximum_temporal_per_hour: NumericDataTS | None = None, minimum_total: Scalar | None = None, maximum_total: Scalar | None = None, ): @@ -161,22 +161,22 @@ def __init__( specific_share_to_other_effects_operation or {} ) self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} - self.minimum_operation = minimum_operation - self.maximum_operation = maximum_operation 
- self.minimum_operation_per_hour = minimum_operation_per_hour - self.maximum_operation_per_hour = maximum_operation_per_hour - self.minimum_invest = minimum_invest - self.maximum_invest = maximum_invest + self.minimum_temporal = minimum_temporal + self.maximum_temporal = maximum_temporal + self.minimum_temporal_per_hour = minimum_temporal_per_hour + self.maximum_temporal_per_hour = maximum_temporal_per_hour + self.minimum_nontemporal = minimum_nontemporal + self.maximum_nontemporal = maximum_nontemporal self.minimum_total = minimum_total self.maximum_total = maximum_total def transform_data(self, flow_system: FlowSystem): - self.minimum_operation_per_hour = flow_system.create_time_series( - f'{self.label_full}|minimum_operation_per_hour', self.minimum_operation_per_hour + self.minimum_temporal_per_hour = flow_system.create_time_series( + f'{self.label_full}|minimum_temporal_per_hour', self.minimum_temporal_per_hour ) - self.maximum_operation_per_hour = flow_system.create_time_series( - f'{self.label_full}|maximum_operation_per_hour', - self.maximum_operation_per_hour, + self.maximum_temporal_per_hour = flow_system.create_time_series( + f'{self.label_full}|maximum_temporal_per_hour', + self.maximum_temporal_per_hour, ) self.specific_share_to_other_effects_operation = flow_system.create_effect_time_series( @@ -198,32 +198,32 @@ def __init__(self, model: SystemModel, element: Effect): super().__init__(model, element) self.element: Effect = element self.total: linopy.Variable | None = None - self.invest: ShareAllocationModel = self.add( + self.nontemporal: ShareAllocationModel = self.add( ShareAllocationModel( self._model, False, self.label_of_element, - 'invest', - label_full=f'{self.label_full}(invest)', - total_max=self.element.maximum_invest, - total_min=self.element.minimum_invest, + 'nontemporal', + label_full=f'{self.label_full}(nontemporal)', + total_max=self.element.maximum_nontemporal, + total_min=self.element.minimum_nontemporal, ) ) - self.operation: ShareAllocationModel = self.add( + self.temporal: ShareAllocationModel = self.add( ShareAllocationModel( self._model, True, self.label_of_element, - 'operation', - label_full=f'{self.label_full}(operation)', - total_max=self.element.maximum_operation, - total_min=self.element.minimum_operation, - min_per_hour=self.element.minimum_operation_per_hour.active_data - if self.element.minimum_operation_per_hour is not None + 'temporal', + label_full=f'{self.label_full}(temporal)', + total_max=self.element.maximum_temporal, + total_min=self.element.minimum_temporal, + min_per_hour=self.element.minimum_temporal_per_hour.active_data + if self.element.minimum_temporal_per_hour is not None else None, - max_per_hour=self.element.maximum_operation_per_hour.active_data - if self.element.maximum_operation_per_hour is not None + max_per_hour=self.element.maximum_temporal_per_hour.active_data + if self.element.maximum_temporal_per_hour is not None else None, ) ) @@ -237,14 +237,14 @@ def do_modeling(self): lower=self.element.minimum_total if self.element.minimum_total is not None else -np.inf, upper=self.element.maximum_total if self.element.maximum_total is not None else np.inf, coords=None, - name=f'{self.label_full}|total', + name=f'{self.label_full}', ), 'total', ) self.add( self._model.add_constraints( - self.total == self.operation.total.sum() + self.invest.total.sum(), name=f'{self.label_full}|total' + self.total == self.temporal.total.sum() + self.nontemporal.total.sum(), name=f'{self.label_full}' ), 'total', ) @@ -424,13 +424,13 @@ def 
add_share_to_effects( self, name: str, expressions: EffectValuesExpr, - target: Literal['operation', 'invest'], + target: Literal['temporal', 'nontemporal'], ) -> None: for effect, expression in expressions.items(): - if target == 'operation': - self.effects[effect].model.operation.add_share(name, expression) - elif target == 'invest': - self.effects[effect].model.invest.add_share(name, expression) + if target == 'temporal': + self.effects[effect].model.temporal.add_share(name, expression) + elif target == 'nontemporal': + self.effects[effect].model.nontemporal.add_share(name, expression) else: raise ValueError(f'Target {target} not supported!') @@ -454,15 +454,15 @@ def do_modeling(self): def _add_share_between_effects(self): for origin_effect in self.effects: - # 1. operation: -> hier sind es Zeitreihen (share_TS) + # 1. temporal: -> hier sind es Zeitreihen (share_TS) for target_effect, time_series in origin_effect.specific_share_to_other_effects_operation.items(): - self.effects[target_effect].model.operation.add_share( - origin_effect.model.operation.label_full, - origin_effect.model.operation.total_per_timestep * time_series.active_data, + self.effects[target_effect].model.temporal.add_share( + origin_effect.model.temporal.label_full, + origin_effect.model.temporal.total_per_timestep * time_series.active_data, ) - # 2. invest: -> hier ist es Scalar (share) + # 2. nontemporal: -> hier ist es Scalar (share) for target_effect, factor in origin_effect.specific_share_to_other_effects_invest.items(): - self.effects[target_effect].model.invest.add_share( - origin_effect.model.invest.label_full, - origin_effect.model.invest.total * factor, + self.effects[target_effect].model.nontemporal.add_share( + origin_effect.model.nontemporal.label_full, + origin_effect.model.nontemporal.total * factor, ) diff --git a/flixopt/elements.py b/flixopt/elements.py index 22256b636..8a611109d 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -580,7 +580,7 @@ def _create_shares(self): effect: self.flow_rate * self._model.hours_per_step * factor.active_data for effect, factor in self.element.effects_per_flow_hour.items() }, - target='operation', + target='temporal', ) def _create_bounds_for_load_factor(self): diff --git a/flixopt/features.py b/flixopt/features.py index 5528917e0..dce268f99 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -86,7 +86,7 @@ def _create_shares(self): effect: self.is_invested * factor if self.is_invested is not None else factor for effect, factor in fix_effects.items() }, - target='invest', + target='nontemporal', ) if self.parameters.divest_effects != {} and self.parameters.optional: @@ -97,14 +97,14 @@ def _create_shares(self): effect: -self.is_invested * factor + factor for effect, factor in self.parameters.divest_effects.items() }, - target='invest', + target='nontemporal', ) if self.parameters.specific_effects != {}: self._model.effects.add_share_to_effects( name=self.label_of_element, expressions={effect: self.size * factor for effect, factor in self.parameters.specific_effects.items()}, - target='invest', + target='nontemporal', ) if self.parameters.piecewise_effects: @@ -736,7 +736,7 @@ def _create_shares(self): effect: self.state_model.on * factor * self._model.hours_per_step for effect, factor in self.parameters.effects_per_running_hour.items() }, - target='operation', + target='temporal', ) if self.parameters.effects_per_switch_on: @@ -746,7 +746,7 @@ def _create_shares(self): effect: self.switch_state_model.switch_on * factor for effect, factor in 
self.parameters.effects_per_switch_on.items() }, - target='operation', + target='temporal', ) @property @@ -956,14 +956,12 @@ def __init__( def do_modeling(self): self.total = self.add( self._model.add_variables( - lower=self._total_min, upper=self._total_max, coords=None, name=f'{self.label_full}|total' + lower=self._total_min, upper=self._total_max, coords=None, name=f'{self.label_full}' ), 'total', ) # eq: sum = sum(share_i) # skalar - self._eq_total = self.add( - self._model.add_constraints(self.total == 0, name=f'{self.label_full}|total'), 'total' - ) + self._eq_total = self.add(self._model.add_constraints(self.total == 0, name=f'{self.label_full}')) if self._shares_are_time_series: self.total_per_timestep = self.add( @@ -975,14 +973,14 @@ def do_modeling(self): if (self._max_per_hour is None) else np.multiply(self._max_per_hour, self._model.hours_per_step), coords=self._model.coords, - name=f'{self.label_full}|total_per_timestep', + name=f'{self.label_full}|per_timestep', ), - 'total_per_timestep', + 'per_timestep', ) self._eq_total_per_timestep = self.add( - self._model.add_constraints(self.total_per_timestep == 0, name=f'{self.label_full}|total_per_timestep'), - 'total_per_timestep', + self._model.add_constraints(self.total_per_timestep == 0, name=f'{self.label_full}|per_timestep'), + 'per_timestep', ) # Add it to the total diff --git a/tests/test_effect.py b/tests/test_effect.py index b4a618ea6..33125a0a0 100644 --- a/tests/test_effect.py +++ b/tests/test_effect.py @@ -15,39 +15,36 @@ def test_minimal(self, basic_flow_system_linopy): model = create_linopy_model(flow_system) assert set(effect.model.variables) == { - 'Effect1(invest)|total', - 'Effect1(operation)|total', - 'Effect1(operation)|total_per_timestep', - 'Effect1|total', + 'Effect1(nontemporal)', + 'Effect1(temporal)', + 'Effect1(temporal)|per_timestep', + 'Effect1', } assert set(effect.model.constraints) == { - 'Effect1(invest)|total', - 'Effect1(operation)|total', - 'Effect1(operation)|total_per_timestep', - 'Effect1|total', + 'Effect1(nontemporal)', + 'Effect1(temporal)', + 'Effect1(temporal)|per_timestep', + 'Effect1', } - assert_var_equal(model.variables['Effect1|total'], model.add_variables()) - assert_var_equal(model.variables['Effect1(invest)|total'], model.add_variables()) - assert_var_equal(model.variables['Effect1(operation)|total'], model.add_variables()) - assert_var_equal( - model.variables['Effect1(operation)|total_per_timestep'], model.add_variables(coords=(timesteps,)) - ) + assert_var_equal(model.variables['Effect1'], model.add_variables()) + assert_var_equal(model.variables['Effect1(nontemporal)'], model.add_variables()) + assert_var_equal(model.variables['Effect1(temporal)'], model.add_variables()) + assert_var_equal(model.variables['Effect1(temporal)|per_timestep'], model.add_variables(coords=(timesteps,))) assert_conequal( - model.constraints['Effect1|total'], - model.variables['Effect1|total'] - == model.variables['Effect1(operation)|total'] + model.variables['Effect1(invest)|total'], + model.constraints['Effect1'], + model.variables['Effect1'] + == model.variables['Effect1(temporal)'] + model.variables['Effect1(nontemporal)'], ) - assert_conequal(model.constraints['Effect1(invest)|total'], model.variables['Effect1(invest)|total'] == 0) + assert_conequal(model.constraints['Effect1(nontemporal)'], model.variables['Effect1(nontemporal)'] == 0) assert_conequal( - model.constraints['Effect1(operation)|total'], - model.variables['Effect1(operation)|total'] - == 
model.variables['Effect1(operation)|total_per_timestep'].sum(), + model.constraints['Effect1(temporal)'], + model.variables['Effect1(temporal)'] == model.variables['Effect1(temporal)|per_timestep'].sum(), ) assert_conequal( - model.constraints['Effect1(operation)|total_per_timestep'], - model.variables['Effect1(operation)|total_per_timestep'] == 0, + model.constraints['Effect1(temporal)|per_timestep'], + model.variables['Effect1(temporal)|per_timestep'] == 0, ) def test_bounds(self, basic_flow_system_linopy): @@ -57,56 +54,55 @@ def test_bounds(self, basic_flow_system_linopy): 'Effect1', '€', 'Testing Effect', - minimum_operation=1.0, - maximum_operation=1.1, - minimum_invest=2.0, - maximum_invest=2.1, + minimum_temporal=1.0, + maximum_temporal=1.1, + minimum_nontemporal=2.0, + maximum_nontemporal=2.1, minimum_total=3.0, maximum_total=3.1, - minimum_operation_per_hour=4.0, - maximum_operation_per_hour=4.1, + minimum_temporal_per_hour=4.0, + maximum_temporal_per_hour=4.1, ) flow_system.add_elements(effect) model = create_linopy_model(flow_system) assert set(effect.model.variables) == { - 'Effect1(invest)|total', - 'Effect1(operation)|total', - 'Effect1(operation)|total_per_timestep', - 'Effect1|total', + 'Effect1(nontemporal)', + 'Effect1(temporal)', + 'Effect1(temporal)|per_timestep', + 'Effect1', } assert set(effect.model.constraints) == { - 'Effect1(invest)|total', - 'Effect1(operation)|total', - 'Effect1(operation)|total_per_timestep', - 'Effect1|total', + 'Effect1(nontemporal)', + 'Effect1(temporal)', + 'Effect1(temporal)|per_timestep', + 'Effect1', } - assert_var_equal(model.variables['Effect1|total'], model.add_variables(lower=3.0, upper=3.1)) - assert_var_equal(model.variables['Effect1(invest)|total'], model.add_variables(lower=2.0, upper=2.1)) - assert_var_equal(model.variables['Effect1(operation)|total'], model.add_variables(lower=1.0, upper=1.1)) + assert_var_equal(model.variables['Effect1'], model.add_variables(lower=3.0, upper=3.1)) + assert_var_equal(model.variables['Effect1(nontemporal)'], model.add_variables(lower=2.0, upper=2.1)) + assert_var_equal(model.variables['Effect1(temporal)'], model.add_variables(lower=1.0, upper=1.1)) assert_var_equal( - model.variables['Effect1(operation)|total_per_timestep'], + model.variables['Effect1(temporal)|per_timestep'], model.add_variables( lower=4.0 * model.hours_per_step, upper=4.1 * model.hours_per_step, coords=(timesteps,) ), ) assert_conequal( - model.constraints['Effect1|total'], - model.variables['Effect1|total'] - == model.variables['Effect1(operation)|total'] + model.variables['Effect1(invest)|total'], + model.constraints['Effect1'], + model.variables['Effect1'] + == model.variables['Effect1(temporal)'] + model.variables['Effect1(nontemporal)'], ) - assert_conequal(model.constraints['Effect1(invest)|total'], model.variables['Effect1(invest)|total'] == 0) + assert_conequal(model.constraints['Effect1(nontemporal)'], model.variables['Effect1(nontemporal)'] == 0) assert_conequal( - model.constraints['Effect1(operation)|total'], - model.variables['Effect1(operation)|total'] - == model.variables['Effect1(operation)|total_per_timestep'].sum(), + model.constraints['Effect1(temporal)'], + model.variables['Effect1(temporal)'] == model.variables['Effect1(temporal)|per_timestep'].sum(), ) assert_conequal( - model.constraints['Effect1(operation)|total_per_timestep'], - model.variables['Effect1(operation)|total_per_timestep'] == 0, + model.constraints['Effect1(temporal)|per_timestep'], + model.variables['Effect1(temporal)|per_timestep'] == 0, 
) def test_shares(self, basic_flow_system_linopy): @@ -124,40 +120,41 @@ def test_shares(self, basic_flow_system_linopy): model = create_linopy_model(flow_system) assert set(effect2.model.variables) == { - 'Effect2(invest)|total', - 'Effect2(operation)|total', - 'Effect2(operation)|total_per_timestep', - 'Effect2|total', - 'Effect1(invest)->Effect2(invest)', - 'Effect1(operation)->Effect2(operation)', + 'Effect2(nontemporal)', + 'Effect2(temporal)', + 'Effect2(temporal)|per_timestep', + 'Effect2', + 'Effect1(nontemporal)->Effect2(nontemporal)', + 'Effect1(temporal)->Effect2(temporal)', } assert set(effect2.model.constraints) == { - 'Effect2(invest)|total', - 'Effect2(operation)|total', - 'Effect2(operation)|total_per_timestep', - 'Effect2|total', - 'Effect1(invest)->Effect2(invest)', - 'Effect1(operation)->Effect2(operation)', + 'Effect2(nontemporal)', + 'Effect2(temporal)', + 'Effect2(temporal)|per_timestep', + 'Effect2', + 'Effect1(nontemporal)->Effect2(nontemporal)', + 'Effect1(temporal)->Effect2(temporal)', } assert_conequal( - model.constraints['Effect2(invest)|total'], - model.variables['Effect2(invest)|total'] == model.variables['Effect1(invest)->Effect2(invest)'], + model.constraints['Effect2(nontemporal)'], + model.variables['Effect2(nontemporal)'] == model.variables['Effect1(nontemporal)->Effect2(nontemporal)'], ) assert_conequal( - model.constraints['Effect2(operation)|total_per_timestep'], - model.variables['Effect2(operation)|total_per_timestep'] - == model.variables['Effect1(operation)->Effect2(operation)'], + model.constraints['Effect2(temporal)|per_timestep'], + model.variables['Effect2(temporal)|per_timestep'] + == model.variables['Effect1(temporal)->Effect2(temporal)'], ) assert_conequal( - model.constraints['Effect1(operation)->Effect2(operation)'], - model.variables['Effect1(operation)->Effect2(operation)'] - == model.variables['Effect1(operation)|total_per_timestep'] * 1.1, + model.constraints['Effect1(temporal)->Effect2(temporal)'], + model.variables['Effect1(temporal)->Effect2(temporal)'] + == model.variables['Effect1(temporal)|per_timestep'] * 1.1, ) assert_conequal( - model.constraints['Effect1(invest)->Effect2(invest)'], - model.variables['Effect1(invest)->Effect2(invest)'] == model.variables['Effect1(invest)|total'] * 2.1, + model.constraints['Effect1(nontemporal)->Effect2(nontemporal)'], + model.variables['Effect1(nontemporal)->Effect2(nontemporal)'] + == model.variables['Effect1(nontemporal)'] * 2.1, ) diff --git a/tests/test_functional.py b/tests/test_functional.py index 5db83f656..e542dd265 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -112,7 +112,7 @@ def test_solve_and_load(solver_fixture, time_steps_fixture): def test_minimal_model(solver_fixture, time_steps_fixture): results = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture) - assert_allclose(results.model.variables['costs|total'].solution.values, 80, rtol=1e-5, atol=1e-10) + assert_allclose(results.model.variables['costs'].solution.values, 80, rtol=1e-5, atol=1e-10) assert_allclose( results.model.variables['Boiler(Q_th)|flow_rate'].solution.values, @@ -122,14 +122,14 @@ def test_minimal_model(solver_fixture, time_steps_fixture): ) assert_allclose( - results.model.variables['costs(operation)|total_per_timestep'].solution.values, + results.model.variables['costs(temporal)|per_timestep'].solution.values, [-0.0, 20.0, 40.0, -0.0, 20.0], rtol=1e-5, atol=1e-10, ) assert_allclose( - results.model.variables['Gastarif(Gas)->costs(operation)'].solution.values, + 
results.model.variables['Gastarif(Gas)->costs(temporal)'].solution.values, [-0.0, 20.0, 40.0, -0.0, 20.0], rtol=1e-5, atol=1e-10, diff --git a/tests/test_integration.py b/tests/test_integration.py index 42fb5f0b7..7ecb1775c 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -185,7 +185,7 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): ) assert_almost_equal_numeric( - calculation.results.model['costs(operation)|total_per_timestep'].solution.values, + calculation.results.model['costs(temporal)|per_timestep'].solution.values, [ -2.38500000e03, -2.21681333e03, @@ -249,7 +249,7 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): 'CO2 doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['CO2(invest)|total'].solution.values, + calculation.results.model['CO2(nontemporal)'].solution.values, 0.9999999999999994, 'CO2 doesnt match expected value', ) @@ -413,7 +413,7 @@ def test_modeling_types_costs(self, modeling_calculation): ) else: assert_almost_equal_numeric( - calc.results.solution_without_overlap('costs(operation)|total_per_timestep').sum(), + calc.results.solution_without_overlap('costs(temporal)|per_timestep').sum(), expected_costs[modeling_type], f'Costs do not match for {modeling_type} modeling type', ) From d69b1cae573b8447746b9f21ca115bed60774911 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 26 Sep 2025 11:52:52 +0200 Subject: [PATCH 02/27] Rename effect domains --- examples/01_Simple/simple_example.py | 2 +- flixopt/effects.py | 20 ++++++------ flixopt/features.py | 2 +- tests/conftest.py | 2 +- tests/test_flow.py | 49 ++++++++++++++-------------- tests/test_integration.py | 26 +++++++-------- tests/test_io.py | 4 +-- tests/test_linear_converter.py | 12 +++---- 8 files changed, 59 insertions(+), 58 deletions(-) diff --git a/examples/01_Simple/simple_example.py b/examples/01_Simple/simple_example.py index 8239f805a..626109344 100644 --- a/examples/01_Simple/simple_example.py +++ b/examples/01_Simple/simple_example.py @@ -37,7 +37,7 @@ unit='kg', description='CO2_e-Emissionen', specific_share_to_other_effects_operation={costs.label: 0.2}, - maximum_operation_per_hour=1000, # Max CO2 emissions per hour + maximum_temporal_per_hour=1000, # Max CO2 emissions per hour ) # --- Define Flow System Components --- diff --git a/flixopt/effects.py b/flixopt/effects.py index 663334638..5d57a0c59 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -52,14 +52,14 @@ class Effect(Element): Maps this effect's operational values to contributions to other effects specific_share_to_other_effects_invest: Investment cross-effect contributions. Maps this effect's investment values to contributions to other effects. - minimum_operation: Minimum allowed total operational contribution across all timesteps. - maximum_operation: Maximum allowed total operational contribution across all timesteps. - minimum_operation_per_hour: Minimum allowed operational contribution per timestep. - maximum_operation_per_hour: Maximum allowed operational contribution per timestep. - minimum_invest: Minimum allowed total investment contribution. - maximum_invest: Maximum allowed total investment contribution. - minimum_total: Minimum allowed total effect (operation + investment combined). - maximum_total: Maximum allowed total effect (operation + investment combined). + minimum_temporal: Minimum allowed total contribution across all timesteps. 
+ maximum_temporal: Maximum allowed total contribution across all timesteps. + minimum_temporal_per_hour: Minimum allowed contribution per hour. + maximum_temporal_per_hour: Maximum allowed contribution per hour. + minimum_nontemporal: Minimum allowed total nontemporal contribution. + maximum_nontemporal: Maximum allowed total nontemporal contribution. + minimum_total: Minimum allowed total effect (temporal + nontemporal combined). + maximum_total: Maximum allowed total effect (temporal + nontemporal combined). meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. @@ -111,8 +111,8 @@ class Effect(Element): label='water_consumption', unit='m³', description='Industrial water usage', - minimum_operation_per_hour=10, # Minimum 10 m³/h for process stability - maximum_operation_per_hour=500, # Maximum 500 m³/h capacity limit + minimum_temporal_per_hour=10, # Minimum 10 m³/h for process stability + maximum_temporal_per_hour=500, # Maximum 500 m³/h capacity limit maximum_total=100_000, # Annual permit limit: 100,000 m³ ) ``` diff --git a/flixopt/features.py b/flixopt/features.py index dce268f99..0c22e739f 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -1076,7 +1076,7 @@ def do_modeling(self): self._model.effects.add_share_to_effects( name=self.label_of_element, expressions={effect: variable * 1 for effect, variable in self.shares.items()}, - target='invest', + target='nontemporal', ) diff --git a/tests/conftest.py b/tests/conftest.py index ac2bab5f4..124bd4cd5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -62,7 +62,7 @@ def simple_flow_system() -> fx.FlowSystem: 'kg', 'CO2_e-Emissionen', specific_share_to_other_effects_operation={costs.label: 0.2}, - maximum_operation_per_hour=1000, + maximum_temporal_per_hour=1000, ) # Create components diff --git a/tests/test_flow.py b/tests/test_flow.py index f7c5d8a69..29c53968b 100644 --- a/tests/test_flow.py +++ b/tests/test_flow.py @@ -99,18 +99,18 @@ def test_effects_per_flow_hour(self, basic_flow_system_linopy): assert set(flow.model.variables) == {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate'} assert set(flow.model.constraints) == {'Sink(Wärme)|total_flow_hours'} - assert 'Sink(Wärme)->Costs(operation)' in set(costs.model.constraints) - assert 'Sink(Wärme)->CO2(operation)' in set(co2.model.constraints) + assert 'Sink(Wärme)->Costs(temporal)' in set(costs.model.constraints) + assert 'Sink(Wärme)->CO2(temporal)' in set(co2.model.constraints) assert_conequal( - model.constraints['Sink(Wärme)->Costs(operation)'], - model.variables['Sink(Wärme)->Costs(operation)'] + model.constraints['Sink(Wärme)->Costs(temporal)'], + model.variables['Sink(Wärme)->Costs(temporal)'] == flow.model.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step * costs_per_flow_hour, ) assert_conequal( - model.constraints['Sink(Wärme)->CO2(operation)'], - model.variables['Sink(Wärme)->CO2(operation)'] + model.constraints['Sink(Wärme)->CO2(temporal)'], + model.variables['Sink(Wärme)->CO2(temporal)'] == flow.model.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step * co2_per_flow_hour, ) @@ -402,19 +402,19 @@ def test_flow_invest_with_effects(self, basic_flow_system_linopy): model = create_linopy_model(flow_system) # Check investment effects - assert 'Sink(Wärme)->Costs(invest)' in model.variables - assert 'Sink(Wärme)->CO2(invest)' in model.variables + assert 'Sink(Wärme)->Costs(nontemporal)' in model.variables + assert 'Sink(Wärme)->CO2(nontemporal)' in model.variables # Check 
fix effects (applied only when is_invested=1) assert_conequal( - model.constraints['Sink(Wärme)->Costs(invest)'], - model.variables['Sink(Wärme)->Costs(invest)'] + model.constraints['Sink(Wärme)->Costs(nontemporal)'], + model.variables['Sink(Wärme)->Costs(nontemporal)'] == flow.model.variables['Sink(Wärme)|is_invested'] * 1000 + flow.model.variables['Sink(Wärme)|size'] * 500, ) assert_conequal( - model.constraints['Sink(Wärme)->CO2(invest)'], - model.variables['Sink(Wärme)->CO2(invest)'] + model.constraints['Sink(Wärme)->CO2(nontemporal)'], + model.variables['Sink(Wärme)->CO2(nontemporal)'] == flow.model.variables['Sink(Wärme)|is_invested'] * 5 + flow.model.variables['Sink(Wärme)|size'] * 0.1, ) @@ -437,11 +437,12 @@ def test_flow_invest_divest_effects(self, basic_flow_system_linopy): model = create_linopy_model(flow_system) # Check divestment effects - assert 'Sink(Wärme)->Costs(invest)' in model.constraints + assert 'Sink(Wärme)->Costs(nontemporal)' in model.constraints assert_conequal( - model.constraints['Sink(Wärme)->Costs(invest)'], - model.variables['Sink(Wärme)->Costs(invest)'] + (model.variables['Sink(Wärme)|is_invested'] - 1) * 500 == 0, + model.constraints['Sink(Wärme)->Costs(nontemporal)'], + model.variables['Sink(Wärme)->Costs(nontemporal)'] + (model.variables['Sink(Wärme)|is_invested'] - 1) * 500 + == 0, ) @@ -539,18 +540,18 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy): 'Sink(Wärme)|on_hours_total', } - assert 'Sink(Wärme)->Costs(operation)' in set(costs.model.constraints) - assert 'Sink(Wärme)->CO2(operation)' in set(co2.model.constraints) + assert 'Sink(Wärme)->Costs(temporal)' in set(costs.model.constraints) + assert 'Sink(Wärme)->CO2(temporal)' in set(co2.model.constraints) assert_conequal( - model.constraints['Sink(Wärme)->Costs(operation)'], - model.variables['Sink(Wärme)->Costs(operation)'] + model.constraints['Sink(Wärme)->Costs(temporal)'], + model.variables['Sink(Wärme)->Costs(temporal)'] == flow.model.variables['Sink(Wärme)|on'] * model.hours_per_step * costs_per_running_hour, ) assert_conequal( - model.constraints['Sink(Wärme)->CO2(operation)'], - model.variables['Sink(Wärme)->CO2(operation)'] + model.constraints['Sink(Wärme)->CO2(temporal)'], + model.variables['Sink(Wärme)->CO2(temporal)'] == flow.model.variables['Sink(Wärme)|on'] * model.hours_per_step * co2_per_running_hour, ) @@ -885,12 +886,12 @@ def test_switch_on_constraints(self, basic_flow_system_linopy): ) # Check that startup cost effect constraint exists - assert 'Sink(Wärme)->Costs(operation)' in model.constraints + assert 'Sink(Wärme)->Costs(temporal)' in model.constraints # Verify the startup cost effect constraint assert_conequal( - model.constraints['Sink(Wärme)->Costs(operation)'], - model.variables['Sink(Wärme)->Costs(operation)'] == flow.model.variables['Sink(Wärme)|switch_on'] * 100, + model.constraints['Sink(Wärme)->Costs(temporal)'], + model.variables['Sink(Wärme)->Costs(temporal)'] == flow.model.variables['Sink(Wärme)|switch_on'] * 100, ) def test_on_hours_limits(self, basic_flow_system_linopy): diff --git a/tests/test_integration.py b/tests/test_integration.py index 7ecb1775c..1d331df00 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -63,11 +63,11 @@ def test_results_persistence(self, simple_flow_system, highs_solver): # Verify key variables from loaded results assert_almost_equal_numeric( - results.solution['costs|total'].values, + results.solution['costs'].values, 81.88394666666667, 'costs doesnt match expected value', ) - 
assert_almost_equal_numeric(results.solution['CO2|total'].values, 255.09184, 'CO2 doesnt match expected value') + assert_almost_equal_numeric(results.solution['CO2'].values, 255.09184, 'CO2 doesnt match expected value') class TestComponents: @@ -179,7 +179,7 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): # Assertions assert_almost_equal_numeric( - calculation.results.model['costs|total'].solution.item(), + calculation.results.model['costs'].solution.item(), -11597.873624489237, 'costs doesnt match expected value', ) @@ -201,50 +201,50 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): ) assert_almost_equal_numeric( - sum(calculation.results.model['CO2(operation)->costs(operation)'].solution.values), + sum(calculation.results.model['CO2(temporal)->costs(temporal)'].solution.values), 258.63729669618675, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Kessel(Q_th)->costs(operation)'].solution.values), + sum(calculation.results.model['Kessel(Q_th)->costs(temporal)'].solution.values), 0.01, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Kessel->costs(operation)'].solution.values), + sum(calculation.results.model['Kessel->costs(temporal)'].solution.values), -0.0, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Gastarif(Q_Gas)->costs(operation)'].solution.values), + sum(calculation.results.model['Gastarif(Q_Gas)->costs(temporal)'].solution.values), 39.09153113079115, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Einspeisung(P_el)->costs(operation)'].solution.values), + sum(calculation.results.model['Einspeisung(P_el)->costs(temporal)'].solution.values), -14196.61245231646, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['KWK->costs(operation)'].solution.values), + sum(calculation.results.model['KWK->costs(temporal)'].solution.values), 0.0, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Kessel(Q_th)->costs(invest)'].solution.values, + calculation.results.model['Kessel(Q_th)->costs(nontemporal)'].solution.values, 1000 + 500, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Speicher->costs(invest)'].solution.values, + calculation.results.model['Speicher->costs(nontemporal)'].solution.values, 800 + 1, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['CO2(operation)|total'].solution.values, + calculation.results.model['CO2(temporal)'].solution.values, 1293.1864834809337, 'CO2 doesnt match expected value', ) @@ -407,7 +407,7 @@ def test_modeling_types_costs(self, modeling_calculation): if modeling_type in ['full', 'aggregated']: assert_almost_equal_numeric( - calc.results.model['costs|total'].solution.item(), + calc.results.model['costs'].solution.item(), expected_costs[modeling_type], f'Costs do not match for {modeling_type} modeling type', ) diff --git a/tests/test_io.py b/tests/test_io.py index 2ec74955f..93bcbb1d0 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -42,8 +42,8 @@ def test_flow_system_file_io(flow_system, highs_solver): ) assert_almost_equal_numeric( - calculation_0.results.solution['costs|total'].values, - calculation_1.results.solution['costs|total'].values, + calculation_0.results.solution['costs'].values, + 
calculation_1.results.solution['costs'].values, 'costs doesnt match expected value', ) diff --git a/tests/test_linear_converter.py b/tests/test_linear_converter.py index 93ace3e78..100ce5bee 100644 --- a/tests/test_linear_converter.py +++ b/tests/test_linear_converter.py @@ -184,10 +184,10 @@ def test_linear_converter_with_on_off(self, basic_flow_system_linopy): ) # Check on_off effects - assert 'Converter->Costs(operation)' in model.constraints + assert 'Converter->Costs(temporal)' in model.constraints assert_conequal( - model.constraints['Converter->Costs(operation)'], - model.variables['Converter->Costs(operation)'] + model.constraints['Converter->Costs(temporal)'], + model.variables['Converter->Costs(temporal)'] == converter.model.on_off.variables['Converter|on'] * model.hours_per_step * 5, ) @@ -488,10 +488,10 @@ def test_piecewise_conversion_with_onoff(self, basic_flow_system_linopy): ) # Verify that the costs effect is applied - assert 'Converter->Costs(operation)' in model.constraints + assert 'Converter->Costs(temporal)' in model.constraints assert_conequal( - model.constraints['Converter->Costs(operation)'], - model.variables['Converter->Costs(operation)'] + model.constraints['Converter->Costs(temporal)'], + model.variables['Converter->Costs(temporal)'] == converter.model.on_off.variables['Converter|on'] * model.hours_per_step * 5, ) From 8e44b9347ab620cb8c6897882efc836513bf3be8 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 26 Sep 2025 15:10:15 +0200 Subject: [PATCH 03/27] Ensure backwards compatability --- flixopt/effects.py | 75 +++++++++++++++++++++++++++++++++++++++++++ flixopt/results.py | 79 ++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 147 insertions(+), 7 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 5d57a0c59..9c914e2ba 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -63,6 +63,14 @@ class Effect(Element): meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + **Deprecated Parameters** (for backwards compatibility): + minimum_operation: Use `minimum_temporal` instead. + maximum_operation: Use `maximum_temporal` instead. + minimum_invest: Use `minimum_nontemporal` instead. + maximum_invest: Use `maximum_nontemporal` instead. + minimum_operation_per_hour: Use `minimum_temporal_per_hour` instead. + maximum_operation_per_hour: Use `maximum_temporal_per_hour` instead. + Examples: Basic cost objective: @@ -150,6 +158,14 @@ def __init__( maximum_temporal_per_hour: NumericDataTS | None = None, minimum_total: Scalar | None = None, maximum_total: Scalar | None = None, + # Backwards compatibility parameters (deprecated) + minimum_operation: Scalar | None = None, + maximum_operation: Scalar | None = None, + minimum_invest: Scalar | None = None, + maximum_invest: Scalar | None = None, + minimum_operation_per_hour: NumericDataTS | None = None, + maximum_operation_per_hour: NumericDataTS | None = None, + **kwargs, ): super().__init__(label, meta_data=meta_data) self.label = label @@ -161,6 +177,65 @@ def __init__( specific_share_to_other_effects_operation or {} ) self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} + # Handle backwards compatibility for deprecated parameters + import warnings + + # Check for deprecated parameters and issue warnings + if minimum_operation is not None: + warnings.warn( + "Parameter 'minimum_operation' is deprecated. 
Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_temporal is None: + minimum_temporal = minimum_operation + + if maximum_operation is not None: + warnings.warn( + "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_temporal is None: + maximum_temporal = maximum_operation + + if minimum_invest is not None: + warnings.warn( + "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_nontemporal is None: + minimum_nontemporal = minimum_invest + + if maximum_invest is not None: + warnings.warn( + "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_nontemporal is None: + maximum_nontemporal = maximum_invest + + if minimum_operation_per_hour is not None: + warnings.warn( + "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_temporal_per_hour is None: + minimum_temporal_per_hour = minimum_operation_per_hour + + if maximum_operation_per_hour is not None: + warnings.warn( + "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_temporal_per_hour is None: + maximum_temporal_per_hour = maximum_operation_per_hour + + # Assign the final values (either new parameters or backwards-compatible ones) self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_temporal_per_hour = minimum_temporal_per_hour diff --git a/flixopt/results.py b/flixopt/results.py index 5a76e3e0a..0a123783f 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -4,6 +4,7 @@ import json import logging import pathlib +import warnings from typing import TYPE_CHECKING, Literal import linopy @@ -27,6 +28,65 @@ logger = logging.getLogger('flixopt') +class BackwardsCompatibleDataset: + """Wrapper around xarray.Dataset to provide backwards compatibility for renamed variables.""" + + # Mapping from old variable names to new variable names + DEPRECATED_VARIABLE_MAPPING = { + # Effect variable names + 'costs|total': 'costs', + # Cross-effect variable names (operation -> temporal, invest -> nontemporal) + # This will be handled dynamically in __getitem__ + } + + def __init__(self, dataset: xr.Dataset): + self._dataset = dataset + + def __getitem__(self, key): + """Access dataset variables with backwards compatibility.""" + # Handle direct mapping first + if key in self.DEPRECATED_VARIABLE_MAPPING: + new_key = self.DEPRECATED_VARIABLE_MAPPING[key] + warnings.warn( + f"Variable name '{key}' is deprecated. Use '{new_key}' instead.", DeprecationWarning, stacklevel=2 + ) + return self._dataset[new_key] + + # Handle cross-effect variables dynamically + if '->' in key and ('(operation)' in key or '(invest)' in key): + # Replace (operation) -> (temporal) and (invest) -> (nontemporal) + new_key = key.replace('(operation)', '(temporal)').replace('(invest)', '(nontemporal)') + if new_key in self._dataset: + warnings.warn( + f"Variable name '{key}' is deprecated. 
Use '{new_key}' instead.", DeprecationWarning, stacklevel=2 + ) + return self._dataset[new_key] + + # Default to original dataset behavior + return self._dataset[key] + + def __getattr__(self, name): + """Delegate all other attributes to the wrapped dataset.""" + return getattr(self._dataset, name) + + def __contains__(self, key): + """Check if key exists in dataset (with backwards compatibility).""" + if key in self._dataset: + return True + if key in self.DEPRECATED_VARIABLE_MAPPING: + return self.DEPRECATED_VARIABLE_MAPPING[key] in self._dataset + # Check cross-effect variables + if '->' in key and ('(operation)' in key or '(invest)' in key): + new_key = key.replace('(operation)', '(temporal)').replace('(invest)', '(nontemporal)') + return new_key in self._dataset + return False + + @property + def _raw_dataset(self): + """Access to the underlying dataset without backwards compatibility.""" + return self._dataset + + class CalculationResults: """Comprehensive container for optimization calculation results and analysis tools. @@ -180,20 +240,25 @@ def __init__( folder: Results storage folder. model: Linopy optimization model. """ - self.solution = solution + self.solution = BackwardsCompatibleDataset(solution) self.flow_system = flow_system self.summary = summary self.name = name self.model = model self.folder = pathlib.Path(folder) if folder is not None else pathlib.Path.cwd() / 'results' self.components = { - label: ComponentResults.from_json(self, infos) for label, infos in self.solution.attrs['Components'].items() + label: ComponentResults.from_json(self, infos) + for label, infos in self.solution._raw_dataset.attrs['Components'].items() } - self.buses = {label: BusResults.from_json(self, infos) for label, infos in self.solution.attrs['Buses'].items()} + self.buses = { + label: BusResults.from_json(self, infos) + for label, infos in self.solution._raw_dataset.attrs['Buses'].items() + } self.effects = { - label: EffectResults.from_json(self, infos) for label, infos in self.solution.attrs['Effects'].items() + label: EffectResults.from_json(self, infos) + for label, infos in self.solution._raw_dataset.attrs['Effects'].items() } self.timesteps_extra = self.solution.indexes['time'] @@ -246,7 +311,7 @@ def filter_solution( """ if element is not None: return filter_dataset(self[element].solution, variable_dims) - return filter_dataset(self.solution, variable_dims) + return filter_dataset(self.solution._raw_dataset, variable_dims) def plot_heatmap( self, @@ -328,7 +393,7 @@ def to_file( paths = fx_io.CalculationResultsPaths(folder, name) - fx_io.save_dataset_to_netcdf(self.solution, paths.solution, compression=compression) + fx_io.save_dataset_to_netcdf(self.solution._raw_dataset, paths.solution, compression=compression) fx_io.save_dataset_to_netcdf(self.flow_system, paths.flow_system, compression=compression) with open(paths.summary, 'w', encoding='utf-8') as f: @@ -362,7 +427,7 @@ def __init__( self._variable_names = variables self._constraint_names = constraints - self.solution = self._calculation_results.solution[self._variable_names] + self.solution = self._calculation_results.solution._raw_dataset[self._variable_names] @property def variables(self) -> linopy.Variables: From 39660518e7b3dae317d5a47fd7c5eb0d75f5dbfb Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 26 Sep 2025 15:15:01 +0200 Subject: [PATCH 04/27] Improve --- flixopt/effects.py | 260 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 251 insertions(+), 9 
deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 9c914e2ba..4a7d20747 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -180,6 +180,24 @@ def __init__( # Handle backwards compatibility for deprecated parameters import warnings + # Validate that both old and new parameters are not set simultaneously + self._validate_parameter_conflicts(minimum_operation, minimum_temporal, 'minimum_operation', 'minimum_temporal') + self._validate_parameter_conflicts(maximum_operation, maximum_temporal, 'maximum_operation', 'maximum_temporal') + self._validate_parameter_conflicts(minimum_invest, minimum_nontemporal, 'minimum_invest', 'minimum_nontemporal') + self._validate_parameter_conflicts(maximum_invest, maximum_nontemporal, 'maximum_invest', 'maximum_nontemporal') + self._validate_parameter_conflicts( + minimum_operation_per_hour, + minimum_temporal_per_hour, + 'minimum_operation_per_hour', + 'minimum_temporal_per_hour', + ) + self._validate_parameter_conflicts( + maximum_operation_per_hour, + maximum_temporal_per_hour, + 'maximum_operation_per_hour', + 'maximum_temporal_per_hour', + ) + # Check for deprecated parameters and issue warnings if minimum_operation is not None: warnings.warn( @@ -235,15 +253,239 @@ def __init__( if maximum_temporal_per_hour is None: maximum_temporal_per_hour = maximum_operation_per_hour - # Assign the final values (either new parameters or backwards-compatible ones) - self.minimum_temporal = minimum_temporal - self.maximum_temporal = maximum_temporal - self.minimum_temporal_per_hour = minimum_temporal_per_hour - self.maximum_temporal_per_hour = maximum_temporal_per_hour - self.minimum_nontemporal = minimum_nontemporal - self.maximum_nontemporal = maximum_nontemporal - self.minimum_total = minimum_total - self.maximum_total = maximum_total + # Store values in private attributes for property access + self._minimum_temporal = minimum_temporal if minimum_temporal is not None else minimum_operation + self._maximum_temporal = maximum_temporal if maximum_temporal is not None else maximum_operation + self._minimum_nontemporal = minimum_nontemporal if minimum_nontemporal is not None else minimum_invest + self._maximum_nontemporal = maximum_nontemporal if maximum_nontemporal is not None else maximum_invest + self._minimum_temporal_per_hour = ( + minimum_temporal_per_hour if minimum_temporal_per_hour is not None else minimum_operation_per_hour + ) + self._maximum_temporal_per_hour = ( + maximum_temporal_per_hour if maximum_temporal_per_hour is not None else maximum_operation_per_hour + ) + self._minimum_total = minimum_total + self._maximum_total = maximum_total + + def _validate_parameter_conflicts(self, old_value, new_value, old_name, new_name): + """Validate that both old and new parameters are not set simultaneously.""" + if old_value is not None and new_value is not None: + raise ValueError( + f"Cannot specify both '{old_name}' and '{new_name}' parameters. " + f"Use only '{new_name}' as '{old_name}' is deprecated." 
+ ) + + # New parameter properties + @property + def minimum_temporal(self): + """Minimum allowed total temporal contribution across all timesteps.""" + return self._minimum_temporal + + @minimum_temporal.setter + def minimum_temporal(self, value): + self._minimum_temporal = value + + @property + def maximum_temporal(self): + """Maximum allowed total temporal contribution across all timesteps.""" + return self._maximum_temporal + + @maximum_temporal.setter + def maximum_temporal(self, value): + self._maximum_temporal = value + + @property + def minimum_nontemporal(self): + """Minimum allowed total nontemporal contribution.""" + return self._minimum_nontemporal + + @minimum_nontemporal.setter + def minimum_nontemporal(self, value): + self._minimum_nontemporal = value + + @property + def maximum_nontemporal(self): + """Maximum allowed total nontemporal contribution.""" + return self._maximum_nontemporal + + @maximum_nontemporal.setter + def maximum_nontemporal(self, value): + self._maximum_nontemporal = value + + @property + def minimum_temporal_per_hour(self): + """Minimum allowed temporal contribution per timestep.""" + return self._minimum_temporal_per_hour + + @minimum_temporal_per_hour.setter + def minimum_temporal_per_hour(self, value): + self._minimum_temporal_per_hour = value + + @property + def maximum_temporal_per_hour(self): + """Maximum allowed temporal contribution per timestep.""" + return self._maximum_temporal_per_hour + + @maximum_temporal_per_hour.setter + def maximum_temporal_per_hour(self, value): + self._maximum_temporal_per_hour = value + + @property + def minimum_total(self): + """Minimum allowed total effect (temporal + nontemporal combined).""" + return self._minimum_total + + @minimum_total.setter + def minimum_total(self, value): + self._minimum_total = value + + @property + def maximum_total(self): + """Maximum allowed total effect (temporal + nontemporal combined).""" + return self._maximum_total + + @maximum_total.setter + def maximum_total(self, value): + self._maximum_total = value + + # Backwards compatible properties (deprecated) + @property + def minimum_operation(self): + """[DEPRECATED] Use minimum_temporal instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._minimum_temporal + + @minimum_operation.setter + def minimum_operation(self, value): + import warnings + + warnings.warn( + "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._minimum_temporal = value + + @property + def maximum_operation(self): + """[DEPRECATED] Use maximum_temporal instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._maximum_temporal + + @maximum_operation.setter + def maximum_operation(self, value): + import warnings + + warnings.warn( + "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._maximum_temporal = value + + @property + def minimum_invest(self): + """[DEPRECATED] Use minimum_nontemporal instead.""" + import warnings + + warnings.warn( + "Property 'minimum_invest' is deprecated. 
Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._minimum_nontemporal + + @minimum_invest.setter + def minimum_invest(self, value): + import warnings + + warnings.warn( + "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._minimum_nontemporal = value + + @property + def maximum_invest(self): + """[DEPRECATED] Use maximum_nontemporal instead.""" + import warnings + + warnings.warn( + "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._maximum_nontemporal + + @maximum_invest.setter + def maximum_invest(self, value): + import warnings + + warnings.warn( + "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._maximum_nontemporal = value + + @property + def minimum_operation_per_hour(self): + """[DEPRECATED] Use minimum_temporal_per_hour instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._minimum_temporal_per_hour + + @minimum_operation_per_hour.setter + def minimum_operation_per_hour(self, value): + import warnings + + warnings.warn( + "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._minimum_temporal_per_hour = value + + @property + def maximum_operation_per_hour(self): + """[DEPRECATED] Use maximum_temporal_per_hour instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._maximum_temporal_per_hour + + @maximum_operation_per_hour.setter + def maximum_operation_per_hour(self, value): + import warnings + + warnings.warn( + "Property 'maximum_operation_per_hour' is deprecated. 
Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._maximum_temporal_per_hour = value def transform_data(self, flow_system: FlowSystem): self.minimum_temporal_per_hour = flow_system.create_time_series( From 6d5b03f6357b6b0c30fbd1cdc9f47eaff8d705f4 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 26 Sep 2025 15:34:31 +0200 Subject: [PATCH 05/27] Improve --- flixopt/effects.py | 243 ++++++++++++--------------------------------- 1 file changed, 64 insertions(+), 179 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 4a7d20747..571ed0141 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -165,7 +165,6 @@ def __init__( maximum_invest: Scalar | None = None, minimum_operation_per_hour: NumericDataTS | None = None, maximum_operation_per_hour: NumericDataTS | None = None, - **kwargs, ): super().__init__(label, meta_data=meta_data) self.label = label @@ -178,180 +177,60 @@ def __init__( ) self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} # Handle backwards compatibility for deprecated parameters - import warnings - # Validate that both old and new parameters are not set simultaneously - self._validate_parameter_conflicts(minimum_operation, minimum_temporal, 'minimum_operation', 'minimum_temporal') - self._validate_parameter_conflicts(maximum_operation, maximum_temporal, 'maximum_operation', 'maximum_temporal') - self._validate_parameter_conflicts(minimum_invest, minimum_nontemporal, 'minimum_invest', 'minimum_nontemporal') - self._validate_parameter_conflicts(maximum_invest, maximum_nontemporal, 'maximum_invest', 'maximum_nontemporal') - self._validate_parameter_conflicts( - minimum_operation_per_hour, - minimum_temporal_per_hour, - 'minimum_operation_per_hour', - 'minimum_temporal_per_hour', - ) - self._validate_parameter_conflicts( - maximum_operation_per_hour, - maximum_temporal_per_hour, - 'maximum_operation_per_hour', - 'maximum_temporal_per_hour', - ) - - # Check for deprecated parameters and issue warnings - if minimum_operation is not None: - warnings.warn( - "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - if minimum_temporal is None: - minimum_temporal = minimum_operation - - if maximum_operation is not None: - warnings.warn( - "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, + if minimum_operation is not None and minimum_temporal is not None: + raise ValueError( + "Cannot specify both 'minimum_operation' and 'minimum_temporal' parameters. Use only 'minimum_temporal' as 'minimum_operation' is deprecated." ) - if maximum_temporal is None: - maximum_temporal = maximum_operation - - if minimum_invest is not None: - warnings.warn( - "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, + if maximum_operation is not None and maximum_temporal is not None: + raise ValueError( + "Cannot specify both 'maximum_operation' and 'maximum_temporal' parameters. Use only 'maximum_temporal' as 'maximum_operation' is deprecated." ) - if minimum_nontemporal is None: - minimum_nontemporal = minimum_invest - - if maximum_invest is not None: - warnings.warn( - "Parameter 'maximum_invest' is deprecated. 
Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, + if minimum_invest is not None and minimum_nontemporal is not None: + raise ValueError( + "Cannot specify both 'minimum_invest' and 'minimum_nontemporal' parameters. Use only 'minimum_nontemporal' as 'minimum_invest' is deprecated." ) - if maximum_nontemporal is None: - maximum_nontemporal = maximum_invest - - if minimum_operation_per_hour is not None: - warnings.warn( - "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, + if maximum_invest is not None and maximum_nontemporal is not None: + raise ValueError( + "Cannot specify both 'maximum_invest' and 'maximum_nontemporal' parameters. Use only 'maximum_nontemporal' as 'maximum_invest' is deprecated." ) - if minimum_temporal_per_hour is None: - minimum_temporal_per_hour = minimum_operation_per_hour - - if maximum_operation_per_hour is not None: - warnings.warn( - "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, + if minimum_operation_per_hour is not None and minimum_temporal_per_hour is not None: + raise ValueError( + "Cannot specify both 'minimum_operation_per_hour' and 'minimum_temporal_per_hour' parameters. Use only 'minimum_temporal_per_hour' as 'minimum_operation_per_hour' is deprecated." ) - if maximum_temporal_per_hour is None: - maximum_temporal_per_hour = maximum_operation_per_hour - - # Store values in private attributes for property access - self._minimum_temporal = minimum_temporal if minimum_temporal is not None else minimum_operation - self._maximum_temporal = maximum_temporal if maximum_temporal is not None else maximum_operation - self._minimum_nontemporal = minimum_nontemporal if minimum_nontemporal is not None else minimum_invest - self._maximum_nontemporal = maximum_nontemporal if maximum_nontemporal is not None else maximum_invest - self._minimum_temporal_per_hour = ( - minimum_temporal_per_hour if minimum_temporal_per_hour is not None else minimum_operation_per_hour - ) - self._maximum_temporal_per_hour = ( - maximum_temporal_per_hour if maximum_temporal_per_hour is not None else maximum_operation_per_hour - ) - self._minimum_total = minimum_total - self._maximum_total = maximum_total - - def _validate_parameter_conflicts(self, old_value, new_value, old_name, new_name): - """Validate that both old and new parameters are not set simultaneously.""" - if old_value is not None and new_value is not None: + if maximum_operation_per_hour is not None and maximum_temporal_per_hour is not None: raise ValueError( - f"Cannot specify both '{old_name}' and '{new_name}' parameters. " - f"Use only '{new_name}' as '{old_name}' is deprecated." + "Cannot specify both 'maximum_operation_per_hour' and 'maximum_temporal_per_hour' parameters. Use only 'maximum_temporal_per_hour' as 'maximum_operation_per_hour' is deprecated." 
) - # New parameter properties - @property - def minimum_temporal(self): - """Minimum allowed total temporal contribution across all timesteps.""" - return self._minimum_temporal - - @minimum_temporal.setter - def minimum_temporal(self, value): - self._minimum_temporal = value - - @property - def maximum_temporal(self): - """Maximum allowed total temporal contribution across all timesteps.""" - return self._maximum_temporal - - @maximum_temporal.setter - def maximum_temporal(self, value): - self._maximum_temporal = value - - @property - def minimum_nontemporal(self): - """Minimum allowed total nontemporal contribution.""" - return self._minimum_nontemporal - - @minimum_nontemporal.setter - def minimum_nontemporal(self, value): - self._minimum_nontemporal = value - - @property - def maximum_nontemporal(self): - """Maximum allowed total nontemporal contribution.""" - return self._maximum_nontemporal - - @maximum_nontemporal.setter - def maximum_nontemporal(self, value): - self._maximum_nontemporal = value - - @property - def minimum_temporal_per_hour(self): - """Minimum allowed temporal contribution per timestep.""" - return self._minimum_temporal_per_hour - - @minimum_temporal_per_hour.setter - def minimum_temporal_per_hour(self, value): - self._minimum_temporal_per_hour = value - - @property - def maximum_temporal_per_hour(self): - """Maximum allowed temporal contribution per timestep.""" - return self._maximum_temporal_per_hour - - @maximum_temporal_per_hour.setter - def maximum_temporal_per_hour(self, value): - self._maximum_temporal_per_hour = value - - @property - def minimum_total(self): - """Minimum allowed total effect (temporal + nontemporal combined).""" - return self._minimum_total - - @minimum_total.setter - def minimum_total(self, value): - self._minimum_total = value - - @property - def maximum_total(self): - """Maximum allowed total effect (temporal + nontemporal combined).""" - return self._maximum_total - - @maximum_total.setter - def maximum_total(self, value): - self._maximum_total = value + # Set attributes directly, handling backwards compatibility parameters + self.minimum_temporal = minimum_temporal + self.maximum_temporal = maximum_temporal + self.minimum_nontemporal = minimum_nontemporal + self.maximum_nontemporal = maximum_nontemporal + self.minimum_temporal_per_hour = minimum_temporal_per_hour + self.maximum_temporal_per_hour = maximum_temporal_per_hour + self.minimum_total = minimum_total + self.maximum_total = maximum_total + + # Handle backwards compatibility parameter assignments + if minimum_operation is not None: + self.minimum_operation = minimum_operation + if maximum_operation is not None: + self.maximum_operation = maximum_operation + if minimum_invest is not None: + self.minimum_invest = minimum_invest + if maximum_invest is not None: + self.maximum_invest = maximum_invest + if minimum_operation_per_hour is not None: + self.minimum_operation_per_hour = minimum_operation_per_hour + if maximum_operation_per_hour is not None: + self.maximum_operation_per_hour = maximum_operation_per_hour # Backwards compatible properties (deprecated) @property def minimum_operation(self): - """[DEPRECATED] Use minimum_temporal instead.""" + """DEPRECATED: Use 'minimum_temporal' property instead.""" import warnings warnings.warn( @@ -359,10 +238,11 @@ def minimum_operation(self): DeprecationWarning, stacklevel=2, ) - return self._minimum_temporal + return self.minimum_temporal @minimum_operation.setter def minimum_operation(self, value): + """DEPRECATED: Use 
'minimum_temporal' property instead.""" import warnings warnings.warn( @@ -370,11 +250,11 @@ def minimum_operation(self, value): DeprecationWarning, stacklevel=2, ) - self._minimum_temporal = value + self.minimum_temporal = value @property def maximum_operation(self): - """[DEPRECATED] Use maximum_temporal instead.""" + """DEPRECATED: Use 'maximum_temporal' property instead.""" import warnings warnings.warn( @@ -382,10 +262,11 @@ def maximum_operation(self): DeprecationWarning, stacklevel=2, ) - return self._maximum_temporal + return self.maximum_temporal @maximum_operation.setter def maximum_operation(self, value): + """DEPRECATED: Use 'maximum_temporal' property instead.""" import warnings warnings.warn( @@ -393,11 +274,11 @@ def maximum_operation(self, value): DeprecationWarning, stacklevel=2, ) - self._maximum_temporal = value + self.maximum_temporal = value @property def minimum_invest(self): - """[DEPRECATED] Use minimum_nontemporal instead.""" + """DEPRECATED: Use 'minimum_nontemporal' property instead.""" import warnings warnings.warn( @@ -405,10 +286,11 @@ def minimum_invest(self): DeprecationWarning, stacklevel=2, ) - return self._minimum_nontemporal + return self.minimum_nontemporal @minimum_invest.setter def minimum_invest(self, value): + """DEPRECATED: Use 'minimum_nontemporal' property instead.""" import warnings warnings.warn( @@ -416,11 +298,11 @@ def minimum_invest(self, value): DeprecationWarning, stacklevel=2, ) - self._minimum_nontemporal = value + self.minimum_nontemporal = value @property def maximum_invest(self): - """[DEPRECATED] Use maximum_nontemporal instead.""" + """DEPRECATED: Use 'maximum_nontemporal' property instead.""" import warnings warnings.warn( @@ -428,10 +310,11 @@ def maximum_invest(self): DeprecationWarning, stacklevel=2, ) - return self._maximum_nontemporal + return self.maximum_nontemporal @maximum_invest.setter def maximum_invest(self, value): + """DEPRECATED: Use 'maximum_nontemporal' property instead.""" import warnings warnings.warn( @@ -439,11 +322,11 @@ def maximum_invest(self, value): DeprecationWarning, stacklevel=2, ) - self._maximum_nontemporal = value + self.maximum_nontemporal = value @property def minimum_operation_per_hour(self): - """[DEPRECATED] Use minimum_temporal_per_hour instead.""" + """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" import warnings warnings.warn( @@ -451,10 +334,11 @@ def minimum_operation_per_hour(self): DeprecationWarning, stacklevel=2, ) - return self._minimum_temporal_per_hour + return self.minimum_temporal_per_hour @minimum_operation_per_hour.setter def minimum_operation_per_hour(self, value): + """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" import warnings warnings.warn( @@ -462,11 +346,11 @@ def minimum_operation_per_hour(self, value): DeprecationWarning, stacklevel=2, ) - self._minimum_temporal_per_hour = value + self.minimum_temporal_per_hour = value @property def maximum_operation_per_hour(self): - """[DEPRECATED] Use maximum_temporal_per_hour instead.""" + """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" import warnings warnings.warn( @@ -474,10 +358,11 @@ def maximum_operation_per_hour(self): DeprecationWarning, stacklevel=2, ) - return self._maximum_temporal_per_hour + return self.maximum_temporal_per_hour @maximum_operation_per_hour.setter def maximum_operation_per_hour(self, value): + """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" import warnings warnings.warn( @@ -485,7 +370,7 @@ def maximum_operation_per_hour(self, 
value):
             DeprecationWarning,
             stacklevel=2,
         )
-        self._maximum_temporal_per_hour = value
+        self.maximum_temporal_per_hour = value
 
     def transform_data(self, flow_system: FlowSystem):
         self.minimum_temporal_per_hour = flow_system.create_time_series(

From ddcb6be3032fc6606e640b425697afbcded25e1d Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Sat, 27 Sep 2025 10:26:26 +0200
Subject: [PATCH 06/27] Bugfix IO with deprecated params

---
 flixopt/effects.py | 117 +++++++++++++++++++++++++++++----------------
 1 file changed, 76 insertions(+), 41 deletions(-)

diff --git a/flixopt/effects.py b/flixopt/effects.py
index 571ed0141..69359fcdb 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -158,13 +158,7 @@ def __init__(
         maximum_temporal_per_hour: NumericDataTS | None = None,
         minimum_total: Scalar | None = None,
         maximum_total: Scalar | None = None,
-        # Backwards compatibility parameters (deprecated)
-        minimum_operation: Scalar | None = None,
-        maximum_operation: Scalar | None = None,
-        minimum_invest: Scalar | None = None,
-        maximum_invest: Scalar | None = None,
-        minimum_operation_per_hour: NumericDataTS | None = None,
-        maximum_operation_per_hour: NumericDataTS | None = None,
+        **kwargs,
     ):
         super().__init__(label, meta_data=meta_data)
         self.label = label
@@ -176,34 +170,89 @@ def __init__(
             specific_share_to_other_effects_operation or {}
         )
         self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {}
+
         # Handle backwards compatibility for deprecated parameters
-        # Validate that both old and new parameters are not set simultaneously
-        if minimum_operation is not None and minimum_temporal is not None:
-            raise ValueError(
-                "Cannot specify both 'minimum_operation' and 'minimum_temporal' parameters. Use only 'minimum_temporal' as 'minimum_operation' is deprecated."
+        import warnings
+
+        # Extract deprecated parameters from kwargs
+        minimum_operation = kwargs.pop('minimum_operation', None)
+        maximum_operation = kwargs.pop('maximum_operation', None)
+        minimum_invest = kwargs.pop('minimum_invest', None)
+        maximum_invest = kwargs.pop('maximum_invest', None)
+        minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None)
+        maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None)
+
+        # Handle minimum_temporal
+        if minimum_operation is not None:
+            warnings.warn(
+                "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.",
+                DeprecationWarning,
+                stacklevel=2,
             )
-        if maximum_operation is not None and maximum_temporal is not None:
-            raise ValueError(
-                "Cannot specify both 'maximum_operation' and 'maximum_temporal' parameters. Use only 'maximum_temporal' as 'maximum_operation' is deprecated."
+            if minimum_temporal is not None:
+                raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.')
+            minimum_temporal = minimum_operation
+
+        # Handle maximum_temporal
+        if maximum_operation is not None:
+            warnings.warn(
+                "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.",
+                DeprecationWarning,
+                stacklevel=2,
             )
-        if minimum_invest is not None and minimum_nontemporal is not None:
-            raise ValueError(
-                "Cannot specify both 'minimum_invest' and 'minimum_nontemporal' parameters. Use only 'minimum_nontemporal' as 'minimum_invest' is deprecated. "
+ if maximum_temporal is not None: + raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') + maximum_temporal = maximum_operation + + # Handle minimum_nontemporal + if minimum_invest is not None: + warnings.warn( + "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, ) - if maximum_invest is not None and maximum_nontemporal is not None: - raise ValueError( - "Cannot specify both 'maximum_invest' and 'maximum_nontemporal' parameters. Use only 'maximum_nontemporal' as 'maximum_invest' is deprecated." + if minimum_nontemporal is not None: + raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') + minimum_nontemporal = minimum_invest + + # Handle maximum_nontemporal + if maximum_invest is not None: + warnings.warn( + "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, ) - if minimum_operation_per_hour is not None and minimum_temporal_per_hour is not None: - raise ValueError( - "Cannot specify both 'minimum_operation_per_hour' and 'minimum_temporal_per_hour' parameters. Use only 'minimum_temporal_per_hour' as 'minimum_operation_per_hour' is deprecated." + if maximum_nontemporal is not None: + raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') + maximum_nontemporal = maximum_invest + + # Handle minimum_temporal_per_hour + if minimum_operation_per_hour is not None: + warnings.warn( + "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, ) - if maximum_operation_per_hour is not None and maximum_temporal_per_hour is not None: - raise ValueError( - "Cannot specify both 'maximum_operation_per_hour' and 'maximum_temporal_per_hour' parameters. Use only 'maximum_temporal_per_hour' as 'maximum_operation_per_hour' is deprecated." + if minimum_temporal_per_hour is not None: + raise ValueError( + 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' + ) + minimum_temporal_per_hour = minimum_operation_per_hour + + # Handle maximum_temporal_per_hour + if maximum_operation_per_hour is not None: + warnings.warn( + "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, ) + if maximum_temporal_per_hour is not None: + raise ValueError( + 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' 
+ ) + maximum_temporal_per_hour = maximum_operation_per_hour - # Set attributes directly, handling backwards compatibility parameters + # Set attributes directly self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_nontemporal = minimum_nontemporal @@ -213,20 +262,6 @@ def __init__( self.minimum_total = minimum_total self.maximum_total = maximum_total - # Handle backwards compatibility parameter assignments - if minimum_operation is not None: - self.minimum_operation = minimum_operation - if maximum_operation is not None: - self.maximum_operation = maximum_operation - if minimum_invest is not None: - self.minimum_invest = minimum_invest - if maximum_invest is not None: - self.maximum_invest = maximum_invest - if minimum_operation_per_hour is not None: - self.minimum_operation_per_hour = minimum_operation_per_hour - if maximum_operation_per_hour is not None: - self.maximum_operation_per_hour = maximum_operation_per_hour - # Backwards compatible properties (deprecated) @property def minimum_operation(self): From de6189527f37977c546f5bd2323a4f5030745397 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 10:36:51 +0200 Subject: [PATCH 07/27] Add guards for extra kwargs --- flixopt/components.py | 33 +++++++++++++++++++++++++++++++++ flixopt/effects.py | 11 +++++++++++ 2 files changed, 44 insertions(+) diff --git a/flixopt/components.py b/flixopt/components.py index 9dd0fc52b..9d98239ab 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -1001,6 +1001,17 @@ def __init__( ) prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source + # Check for any remaining unexpected kwargs using inspect module + import inspect + + sig = inspect.signature(self.__init__) + known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} + # Also filter out 'kwargs' itself which can appear during deserialization + extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} + if extra_kwargs: + unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) + raise TypeError(f'SourceAndSink.__init__() got unexpected keyword argument(s): {unexpected_params}') + super().__init__( label, inputs=inputs, @@ -1133,6 +1144,17 @@ def __init__( raise ValueError('Either source or outputs can be specified, but not both.') outputs = [source] + # Check for any remaining unexpected kwargs using inspect module + import inspect + + sig = inspect.signature(self.__init__) + known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} + # Also filter out 'kwargs' itself which can appear during deserialization + extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} + if extra_kwargs: + unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) + raise TypeError(f'Source.__init__() got unexpected keyword argument(s): {unexpected_params}') + self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( label, @@ -1261,6 +1283,17 @@ def __init__( raise ValueError('Either sink or inputs can be specified, but not both.') inputs = [sink] + # Check for any remaining unexpected kwargs using inspect module + import inspect + + sig = inspect.signature(self.__init__) + known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} + # Also filter out 'kwargs' itself which can appear during deserialization + extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 
'kwargs'} + if extra_kwargs: + unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) + raise TypeError(f'Sink.__init__() got unexpected keyword argument(s): {unexpected_params}') + self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( label, diff --git a/flixopt/effects.py b/flixopt/effects.py index 69359fcdb..19af85ecc 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -252,6 +252,17 @@ def __init__( ) maximum_temporal_per_hour = maximum_operation_per_hour + # Check for any remaining unexpected kwargs using inspect module + import inspect + + sig = inspect.signature(self.__init__) + known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} + # Also filter out 'kwargs' itself which can appear during deserialization + extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} + if extra_kwargs: + unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) + raise TypeError(f'Effect.__init__() got unexpected keyword argument(s): {unexpected_params}') + # Set attributes directly self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal From 115f3a051ebfb8a287fc77fd07f1c0e786fe5291 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 10:41:25 +0200 Subject: [PATCH 08/27] Add guards for extra kwargs --- flixopt/components.py | 36 ++++++------------------------------ flixopt/effects.py | 12 ++---------- flixopt/structure.py | 30 ++++++++++++++++++++++++++++++ 3 files changed, 38 insertions(+), 40 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 9d98239ab..3bb435cb1 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -1001,16 +1001,8 @@ def __init__( ) prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source - # Check for any remaining unexpected kwargs using inspect module - import inspect - - sig = inspect.signature(self.__init__) - known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} - # Also filter out 'kwargs' itself which can appear during deserialization - extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} - if extra_kwargs: - unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) - raise TypeError(f'SourceAndSink.__init__() got unexpected keyword argument(s): {unexpected_params}') + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) super().__init__( label, @@ -1144,16 +1136,8 @@ def __init__( raise ValueError('Either source or outputs can be specified, but not both.') outputs = [source] - # Check for any remaining unexpected kwargs using inspect module - import inspect - - sig = inspect.signature(self.__init__) - known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} - # Also filter out 'kwargs' itself which can appear during deserialization - extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} - if extra_kwargs: - unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) - raise TypeError(f'Source.__init__() got unexpected keyword argument(s): {unexpected_params}') + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( @@ -1283,16 +1267,8 @@ def __init__( raise ValueError('Either sink or inputs can be specified, but not both.') inputs = [sink] - # Check for any 
remaining unexpected kwargs using inspect module - import inspect - - sig = inspect.signature(self.__init__) - known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} - # Also filter out 'kwargs' itself which can appear during deserialization - extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} - if extra_kwargs: - unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) - raise TypeError(f'Sink.__init__() got unexpected keyword argument(s): {unexpected_params}') + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( diff --git a/flixopt/effects.py b/flixopt/effects.py index 19af85ecc..3e404ec65 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -252,16 +252,8 @@ def __init__( ) maximum_temporal_per_hour = maximum_operation_per_hour - # Check for any remaining unexpected kwargs using inspect module - import inspect - - sig = inspect.signature(self.__init__) - known_params = set(sig.parameters.keys()) - {'self', 'kwargs'} - # Also filter out 'kwargs' itself which can appear during deserialization - extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'} - if extra_kwargs: - unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) - raise TypeError(f'Effect.__init__() got unexpected keyword argument(s): {unexpected_params}') + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) # Set attributes directly self.minimum_temporal = minimum_temporal diff --git a/flixopt/structure.py b/flixopt/structure.py index c5519066c..12cd99c13 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -202,6 +202,36 @@ def _serialize_dict(self, d): """Serialize a dictionary of items.""" return {k: self._serialize_value(v) for k, v in d.items()} + def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None: + """ + Validate that no unexpected keyword arguments are present in kwargs. + + This method uses inspect to get the actual function signature and filters out + any parameters that are not defined in the __init__ method, while also + handling the special case of 'kwargs' itself which can appear during deserialization. + + Args: + kwargs: Dictionary of keyword arguments to validate + class_name: Optional class name for error messages. 
If None, uses self.__class__.__name__
+
+        Raises:
+            TypeError: If unexpected keyword arguments are found
+        """
+        if not kwargs:
+            return
+
+        import inspect
+
+        sig = inspect.signature(self.__init__)
+        known_params = set(sig.parameters.keys()) - {'self', 'kwargs'}
+        # Also filter out 'kwargs' itself which can appear during deserialization
+        extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'}
+
+        if extra_kwargs:
+            class_name = class_name or self.__class__.__name__
+            unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys())
+            raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}')
+
     @classmethod
     def _deserialize_dict(cls, data: dict) -> dict | Interface:
         if '__class__' in data:

From 4a8257422e94dbdfe3c4249ad70e4642e79122a1 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Sat, 27 Sep 2025 10:51:47 +0200
Subject: [PATCH 09/27] Centralize logic for deprecated params

---
 flixopt/components.py |  97 ++++++++++++++++++++++----------------
 flixopt/effects.py    | 106 +++++++++---------------------------------
 flixopt/structure.py  |  43 +++++++++++++++++
 3 files changed, 122 insertions(+), 124 deletions(-)

diff --git a/flixopt/components.py b/flixopt/components.py
index 3bb435cb1..1cfc15d9d 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -970,36 +970,39 @@ def __init__(
         meta_data: dict | None = None,
         **kwargs,
     ):
-        source = kwargs.pop('source', None)
-        sink = kwargs.pop('sink', None)
-        prevent_simultaneous_sink_and_source = kwargs.pop('prevent_simultaneous_sink_and_source', None)
-        if source is not None:
-            warnings.warn(
-                'The use of the source argument is deprecated. Use the outputs argument instead.',
-                DeprecationWarning,
-                stacklevel=2,
-            )
+        # Handle backwards compatibility for deprecated parameters
+        deprecated_mappings = {
+            'source': 'outputs',
+            'sink': 'inputs',
+            'prevent_simultaneous_sink_and_source': 'prevent_simultaneous_flow_rates',
+        }
+
+        # Set attribute values to allow conflict checking
+        self._temp_outputs = outputs
+        self._temp_inputs = inputs
+        self._temp_prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
+
+        # Handle deprecated parameters using centralized method
+        updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings)
+
+        # Update parameters with deprecated values if provided, with special handling for list parameters
+        if 'outputs' in updated_params:
             if outputs is not None:
                 raise ValueError('Either source or outputs can be specified, but not both.')
-            outputs = [source]
+            outputs = [updated_params['outputs']]
 
-        if sink is not None:
-            warnings.warn(
-                'The use of the sink argument is deprecated. Use the inputs argument instead.',
-                DeprecationWarning,
-                stacklevel=2,
-            )
+        if 'inputs' in updated_params:
             if inputs is not None:
                 raise ValueError('Either sink or inputs can be specified, but not both.')
-            inputs = [sink]
+            inputs = [updated_params['inputs']]
 
-        if prevent_simultaneous_sink_and_source is not None:
-            warnings.warn(
-                'The use of the prevent_simultaneous_sink_and_source argument is deprecated. 
Use the prevent_simultaneous_flow_rates argument instead.', - DeprecationWarning, - stacklevel=2, - ) - prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source + if 'prevent_simultaneous_flow_rates' in updated_params: + prevent_simultaneous_flow_rates = updated_params['prevent_simultaneous_flow_rates'] + + # Clean up temporary attributes + delattr(self, '_temp_outputs') + delattr(self, '_temp_inputs') + delattr(self, '_temp_prevent_simultaneous_flow_rates') # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1125,16 +1128,23 @@ def __init__( prevent_simultaneous_flow_rates: bool = False, **kwargs, ): - source = kwargs.pop('source', None) - if source is not None: - warnings.warn( - 'The use of the source argument is deprecated. Use the outputs argument instead.', - DeprecationWarning, - stacklevel=2, - ) + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = {'source': 'outputs'} + + # Set attribute to allow conflict checking + self._temp_outputs = outputs + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update parameters with deprecated values if provided + if 'outputs' in updated_params: if outputs is not None: raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [source] + outputs = [updated_params['outputs']] + + # Clean up temporary attribute + delattr(self, '_temp_outputs') # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1256,16 +1266,23 @@ def __init__( Note: The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases. """ - sink = kwargs.pop('sink', None) - if sink is not None: - warnings.warn( - 'The use of the sink argument is deprecated. Use the inputs argument instead.', - DeprecationWarning, - stacklevel=2, - ) + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = {'sink': 'inputs'} + + # Set attribute to allow conflict checking + self._temp_inputs = inputs + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update parameters with deprecated values if provided + if 'inputs' in updated_params: if inputs is not None: raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [sink] + inputs = [updated_params['inputs']] + + # Clean up temporary attribute + delattr(self, '_temp_inputs') # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) diff --git a/flixopt/effects.py b/flixopt/effects.py index 3e404ec65..5c5059901 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -172,96 +172,34 @@ def __init__( self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} # Handle backwards compatibility for deprecated parameters - import warnings - - # Extract deprecated parameters from kwargs - minimum_operation = kwargs.pop('minimum_operation', None) - maximum_operation = kwargs.pop('maximum_operation', None) - minimum_invest = kwargs.pop('minimum_invest', None) - maximum_invest = kwargs.pop('maximum_invest', None) - minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) - maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) - - # Handle minimum_temporal - if minimum_operation is not None: - warnings.warn( - "Parameter 'minimum_operation' is deprecated. 
Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - if minimum_temporal is not None: - raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') - minimum_temporal = minimum_operation - - # Handle maximum_temporal - if maximum_operation is not None: - warnings.warn( - "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - if maximum_temporal is not None: - raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') - maximum_temporal = maximum_operation - - # Handle minimum_nontemporal - if minimum_invest is not None: - warnings.warn( - "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - if minimum_nontemporal is not None: - raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') - minimum_nontemporal = minimum_invest - - # Handle maximum_nontemporal - if maximum_invest is not None: - warnings.warn( - "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - if maximum_nontemporal is not None: - raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') - maximum_nontemporal = maximum_invest - - # Handle minimum_temporal_per_hour - if minimum_operation_per_hour is not None: - warnings.warn( - "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - if minimum_temporal_per_hour is not None: - raise ValueError( - 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' - ) - minimum_temporal_per_hour = minimum_operation_per_hour - - # Handle maximum_temporal_per_hour - if maximum_operation_per_hour is not None: - warnings.warn( - "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - if maximum_temporal_per_hour is not None: - raise ValueError( - 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' 
- ) - maximum_temporal_per_hour = maximum_operation_per_hour - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - - # Set attributes directly + deprecated_mappings = { + 'minimum_operation': 'minimum_temporal', + 'maximum_operation': 'maximum_temporal', + 'minimum_invest': 'minimum_nontemporal', + 'maximum_invest': 'maximum_nontemporal', + 'minimum_operation_per_hour': 'minimum_temporal_per_hour', + 'maximum_operation_per_hour': 'maximum_temporal_per_hour', + } + + # Set attribute values before calling _handle_deprecated_params to allow conflict checking self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_nontemporal = minimum_nontemporal self.maximum_nontemporal = maximum_nontemporal self.minimum_temporal_per_hour = minimum_temporal_per_hour self.maximum_temporal_per_hour = maximum_temporal_per_hour + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update attributes with deprecated values if provided + for param_name, param_value in updated_params.items(): + setattr(self, param_name, param_value) + + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) + + # Set remaining attributes self.minimum_total = minimum_total self.maximum_total = maximum_total diff --git a/flixopt/structure.py b/flixopt/structure.py index 12cd99c13..664e52107 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -232,6 +232,49 @@ def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None: unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}') + def _handle_deprecated_params(self, kwargs: dict, deprecated_mappings: dict) -> dict: + """ + Handle deprecated parameters by extracting them from kwargs and issuing warnings. + + This method centralizes the deprecated parameter handling pattern used across classes. + It extracts deprecated parameters, issues deprecation warnings, checks for conflicts, + and returns the updated parameter values. + + Args: + kwargs: Dictionary of keyword arguments that may contain deprecated parameters + deprecated_mappings: Dictionary mapping deprecated parameter names to new parameter names + Format: {'deprecated_name': 'new_name'} + + Returns: + Dictionary with new parameter names as keys and their values + + Raises: + ValueError: If both deprecated and new parameters are specified simultaneously + """ + import warnings + + updated_params = {} + + for deprecated_name, new_name in deprecated_mappings.items(): + deprecated_value = kwargs.pop(deprecated_name, None) + + if deprecated_value is not None: + # Issue deprecation warning + warnings.warn( + f"Parameter '{deprecated_name}' is deprecated. 
Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=3, # Skip this method and the calling method to point to user code + ) + + # Check if the new parameter is already set + current_value = getattr(self, new_name, None) + if current_value is not None: + raise ValueError(f"Either '{deprecated_name}' or '{new_name}' can be specified, but not both.") + + updated_params[new_name] = deprecated_value + + return updated_params + @classmethod def _deserialize_dict(cls, data: dict) -> dict | Interface: if '__class__' in data: From 9f4c1f6acddd50b60199fd67ea6be55491752faa Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 10:56:04 +0200 Subject: [PATCH 10/27] Move handlign from centralized back to classes in a dedicated method --- flixopt/components.py | 151 +++++++++++++++++++++++------------------- flixopt/effects.py | 143 ++++++++++++++++++++++++++++++++------- flixopt/structure.py | 43 ------------ 3 files changed, 204 insertions(+), 133 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 1cfc15d9d..26c49e489 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -970,39 +970,10 @@ def __init__( meta_data: dict | None = None, **kwargs, ): - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = { - 'source': 'outputs', - 'sink': 'inputs', - 'prevent_simultaneous_sink_and_source': 'prevent_simultaneous_flow_rates', - } - - # Set attribute values to allow conflict checking - self._temp_outputs = outputs - self._temp_inputs = inputs - self._temp_prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided, with special handling for list parameters - if 'outputs' in updated_params: - if outputs is not None: - raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [updated_params['outputs']] - - if 'inputs' in updated_params: - if inputs is not None: - raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [updated_params['inputs']] - - if 'prevent_simultaneous_flow_rates' in updated_params: - prevent_simultaneous_flow_rates = updated_params['prevent_simultaneous_flow_rates'] - - # Clean up temporary attributes - delattr(self, '_temp_outputs') - delattr(self, '_temp_inputs') - delattr(self, '_temp_prevent_simultaneous_flow_rates') + # Handle deprecated parameters + inputs, outputs, prevent_simultaneous_flow_rates = self._handle_deprecated_source_and_sink_params( + kwargs, inputs, outputs, prevent_simultaneous_flow_rates + ) # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1016,6 +987,46 @@ def __init__( ) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates + def _handle_deprecated_source_and_sink_params(self, kwargs, inputs, outputs, prevent_simultaneous_flow_rates): + """Handle deprecated parameter names for SourceAndSink class.""" + import warnings + + # Handle deprecated 'source' parameter + source = kwargs.pop('source', None) + if source is not None: + warnings.warn( + 'The use of the source argument is deprecated. 
Use the outputs argument instead.', + DeprecationWarning, + stacklevel=3, + ) + if outputs is not None: + raise ValueError('Either source or outputs can be specified, but not both.') + outputs = [source] + + # Handle deprecated 'sink' parameter + sink = kwargs.pop('sink', None) + if sink is not None: + warnings.warn( + 'The use of the sink argument is deprecated. Use the inputs argument instead.', + DeprecationWarning, + stacklevel=3, + ) + if inputs is not None: + raise ValueError('Either sink or inputs can be specified, but not both.') + inputs = [sink] + + # Handle deprecated 'prevent_simultaneous_sink_and_source' parameter + prevent_simultaneous_sink_and_source = kwargs.pop('prevent_simultaneous_sink_and_source', None) + if prevent_simultaneous_sink_and_source is not None: + warnings.warn( + 'The use of the prevent_simultaneous_sink_and_source argument is deprecated. Use the prevent_simultaneous_flow_rates argument instead.', + DeprecationWarning, + stacklevel=3, + ) + prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source + + return inputs, outputs, prevent_simultaneous_flow_rates + @property def source(self) -> Flow: warnings.warn( @@ -1128,23 +1139,8 @@ def __init__( prevent_simultaneous_flow_rates: bool = False, **kwargs, ): - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = {'source': 'outputs'} - - # Set attribute to allow conflict checking - self._temp_outputs = outputs - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided - if 'outputs' in updated_params: - if outputs is not None: - raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [updated_params['outputs']] - - # Clean up temporary attribute - delattr(self, '_temp_outputs') + # Handle deprecated parameters + outputs = self._handle_deprecated_source_params(kwargs, outputs) # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1157,6 +1153,24 @@ def __init__( prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None, ) + def _handle_deprecated_source_params(self, kwargs, outputs): + """Handle deprecated parameter names for Source class.""" + import warnings + + # Handle deprecated 'source' parameter + source = kwargs.pop('source', None) + if source is not None: + warnings.warn( + 'The use of the source argument is deprecated. Use the outputs argument instead.', + DeprecationWarning, + stacklevel=3, + ) + if outputs is not None: + raise ValueError('Either source or outputs can be specified, but not both.') + outputs = [source] + + return outputs + @property def source(self) -> Flow: warnings.warn( @@ -1266,23 +1280,8 @@ def __init__( Note: The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases. 
""" - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = {'sink': 'inputs'} - - # Set attribute to allow conflict checking - self._temp_inputs = inputs - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided - if 'inputs' in updated_params: - if inputs is not None: - raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [updated_params['inputs']] - - # Clean up temporary attribute - delattr(self, '_temp_inputs') + # Handle deprecated parameters + inputs = self._handle_deprecated_sink_params(kwargs, inputs) # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1295,6 +1294,24 @@ def __init__( prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None, ) + def _handle_deprecated_sink_params(self, kwargs, inputs): + """Handle deprecated parameter names for Sink class.""" + import warnings + + # Handle deprecated 'sink' parameter + sink = kwargs.pop('sink', None) + if sink is not None: + warnings.warn( + 'The use of the sink argument is deprecated. Use the inputs argument instead.', + DeprecationWarning, + stacklevel=3, + ) + if inputs is not None: + raise ValueError('Either sink or inputs can be specified, but not both.') + inputs = [sink] + + return inputs + @property def sink(self) -> Flow: warnings.warn( diff --git a/flixopt/effects.py b/flixopt/effects.py index 5c5059901..e8236a3b0 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -171,38 +171,135 @@ def __init__( ) self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = { - 'minimum_operation': 'minimum_temporal', - 'maximum_operation': 'maximum_temporal', - 'minimum_invest': 'minimum_nontemporal', - 'maximum_invest': 'maximum_nontemporal', - 'minimum_operation_per_hour': 'minimum_temporal_per_hour', - 'maximum_operation_per_hour': 'maximum_temporal_per_hour', - } - - # Set attribute values before calling _handle_deprecated_params to allow conflict checking + # Handle deprecated parameters + ( + minimum_temporal, + maximum_temporal, + minimum_nontemporal, + maximum_nontemporal, + minimum_temporal_per_hour, + maximum_temporal_per_hour, + ) = self._handle_deprecated_effect_params( + kwargs, + minimum_temporal, + maximum_temporal, + minimum_nontemporal, + maximum_nontemporal, + minimum_temporal_per_hour, + maximum_temporal_per_hour, + ) + + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) + + # Set attributes self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_nontemporal = minimum_nontemporal self.maximum_nontemporal = maximum_nontemporal self.minimum_temporal_per_hour = minimum_temporal_per_hour self.maximum_temporal_per_hour = maximum_temporal_per_hour - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update attributes with deprecated values if provided - for param_name, param_value in updated_params.items(): - setattr(self, param_name, param_value) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - - # Set remaining attributes self.minimum_total = minimum_total self.maximum_total = maximum_total + def _handle_deprecated_effect_params( + self, + kwargs, + 
minimum_temporal, + maximum_temporal, + minimum_nontemporal, + maximum_nontemporal, + minimum_temporal_per_hour, + maximum_temporal_per_hour, + ): + """Handle deprecated parameter names for Effect class.""" + import warnings + + # Handle minimum_operation -> minimum_temporal + minimum_operation = kwargs.pop('minimum_operation', None) + if minimum_operation is not None: + warnings.warn( + "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if minimum_temporal is not None: + raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') + minimum_temporal = minimum_operation + + # Handle maximum_operation -> maximum_temporal + maximum_operation = kwargs.pop('maximum_operation', None) + if maximum_operation is not None: + warnings.warn( + "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if maximum_temporal is not None: + raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') + maximum_temporal = maximum_operation + + # Handle minimum_invest -> minimum_nontemporal + minimum_invest = kwargs.pop('minimum_invest', None) + if minimum_invest is not None: + warnings.warn( + "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if minimum_nontemporal is not None: + raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') + minimum_nontemporal = minimum_invest + + # Handle maximum_invest -> maximum_nontemporal + maximum_invest = kwargs.pop('maximum_invest', None) + if maximum_invest is not None: + warnings.warn( + "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if maximum_nontemporal is not None: + raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') + maximum_nontemporal = maximum_invest + + # Handle minimum_operation_per_hour -> minimum_temporal_per_hour + minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) + if minimum_operation_per_hour is not None: + warnings.warn( + "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=3, + ) + if minimum_temporal_per_hour is not None: + raise ValueError( + 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' + ) + minimum_temporal_per_hour = minimum_operation_per_hour + + # Handle maximum_operation_per_hour -> maximum_temporal_per_hour + maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) + if maximum_operation_per_hour is not None: + warnings.warn( + "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=3, + ) + if maximum_temporal_per_hour is not None: + raise ValueError( + 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' 
+ ) + maximum_temporal_per_hour = maximum_operation_per_hour + + return ( + minimum_temporal, + maximum_temporal, + minimum_nontemporal, + maximum_nontemporal, + minimum_temporal_per_hour, + maximum_temporal_per_hour, + ) + # Backwards compatible properties (deprecated) @property def minimum_operation(self): diff --git a/flixopt/structure.py b/flixopt/structure.py index 664e52107..12cd99c13 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -232,49 +232,6 @@ def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None: unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}') - def _handle_deprecated_params(self, kwargs: dict, deprecated_mappings: dict) -> dict: - """ - Handle deprecated parameters by extracting them from kwargs and issuing warnings. - - This method centralizes the deprecated parameter handling pattern used across classes. - It extracts deprecated parameters, issues deprecation warnings, checks for conflicts, - and returns the updated parameter values. - - Args: - kwargs: Dictionary of keyword arguments that may contain deprecated parameters - deprecated_mappings: Dictionary mapping deprecated parameter names to new parameter names - Format: {'deprecated_name': 'new_name'} - - Returns: - Dictionary with new parameter names as keys and their values - - Raises: - ValueError: If both deprecated and new parameters are specified simultaneously - """ - import warnings - - updated_params = {} - - for deprecated_name, new_name in deprecated_mappings.items(): - deprecated_value = kwargs.pop(deprecated_name, None) - - if deprecated_value is not None: - # Issue deprecation warning - warnings.warn( - f"Parameter '{deprecated_name}' is deprecated. 
Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=3, # Skip this method and the calling method to point to user code - ) - - # Check if the new parameter is already set - current_value = getattr(self, new_name, None) - if current_value is not None: - raise ValueError(f"Either '{deprecated_name}' or '{new_name}' can be specified, but not both.") - - updated_params[new_name] = deprecated_value - - return updated_params - @classmethod def _deserialize_dict(cls, data: dict) -> dict | Interface: if '__class__' in data: From 5fe2c6461a56cb9c68c5b419e7db2d00f443ff1a Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:02:05 +0200 Subject: [PATCH 11/27] Improce property handling --- flixopt/effects.py | 271 ++++++++----------------------------------- flixopt/structure.py | 17 +++ 2 files changed, 65 insertions(+), 223 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index e8236a3b0..ac8492494 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,7 +16,7 @@ from .core import NumericDataTS, Scalar, TimeSeries from .features import ShareAllocationModel -from .structure import Element, ElementModel, Model, SystemModel, register_class_for_io +from .structure import Element, ElementModel, Model, SystemModel, handle_deprecated_param, register_class_for_io if TYPE_CHECKING: from collections.abc import Iterator @@ -213,237 +213,62 @@ def _handle_deprecated_effect_params( maximum_temporal_per_hour, ): """Handle deprecated parameter names for Effect class.""" - import warnings - # Handle minimum_operation -> minimum_temporal - minimum_operation = kwargs.pop('minimum_operation', None) - if minimum_operation is not None: + # Define the mappings: (old_name, new_name, current_value) + deprecated_mappings = [ + ('minimum_operation', 'minimum_temporal', minimum_temporal), + ('maximum_operation', 'maximum_temporal', maximum_temporal), + ('minimum_invest', 'minimum_nontemporal', minimum_nontemporal), + ('maximum_invest', 'maximum_nontemporal', maximum_nontemporal), + ('minimum_operation_per_hour', 'minimum_temporal_per_hour', minimum_temporal_per_hour), + ('maximum_operation_per_hour', 'maximum_temporal_per_hour', maximum_temporal_per_hour), + ] + + # Process each deprecated parameter + results = [] + for old_name, new_name, current_value in deprecated_mappings: + new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) + results.append(new_value) + + return tuple(results) + + _DEPRECATED_PROPERTIES = { + 'minimum_operation': 'minimum_temporal', + 'maximum_operation': 'maximum_temporal', + 'minimum_invest': 'minimum_nontemporal', + 'maximum_invest': 'maximum_nontemporal', + 'minimum_operation_per_hour': 'minimum_temporal_per_hour', + 'maximum_operation_per_hour': 'maximum_temporal_per_hour', + } + + def __getattr__(self, name): + # Handle deprecated properties + if name in self._DEPRECATED_PROPERTIES: + import warnings + + new_name = self._DEPRECATED_PROPERTIES[name] warnings.warn( - "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + f"Property '{name}' is deprecated. 
Use '{new_name}' instead.", DeprecationWarning, - stacklevel=3, + stacklevel=2, ) - if minimum_temporal is not None: - raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') - minimum_temporal = minimum_operation + return getattr(self, new_name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") - # Handle maximum_operation -> maximum_temporal - maximum_operation = kwargs.pop('maximum_operation', None) - if maximum_operation is not None: - warnings.warn( - "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if maximum_temporal is not None: - raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') - maximum_temporal = maximum_operation - - # Handle minimum_invest -> minimum_nontemporal - minimum_invest = kwargs.pop('minimum_invest', None) - if minimum_invest is not None: - warnings.warn( - "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if minimum_nontemporal is not None: - raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') - minimum_nontemporal = minimum_invest - - # Handle maximum_invest -> maximum_nontemporal - maximum_invest = kwargs.pop('maximum_invest', None) - if maximum_invest is not None: - warnings.warn( - "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if maximum_nontemporal is not None: - raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') - maximum_nontemporal = maximum_invest - - # Handle minimum_operation_per_hour -> minimum_temporal_per_hour - minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) - if minimum_operation_per_hour is not None: - warnings.warn( - "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=3, - ) - if minimum_temporal_per_hour is not None: - raise ValueError( - 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' - ) - minimum_temporal_per_hour = minimum_operation_per_hour + def __setattr__(self, name, value): + # Handle deprecated properties + if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): + import warnings - # Handle maximum_operation_per_hour -> maximum_temporal_per_hour - maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) - if maximum_operation_per_hour is not None: + new_name = self._DEPRECATED_PROPERTIES[name] warnings.warn( - "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + f"Property '{name}' is deprecated. Use '{new_name}' instead.", DeprecationWarning, - stacklevel=3, + stacklevel=2, ) - if maximum_temporal_per_hour is not None: - raise ValueError( - 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' 
- ) - maximum_temporal_per_hour = maximum_operation_per_hour - - return ( - minimum_temporal, - maximum_temporal, - minimum_nontemporal, - maximum_nontemporal, - minimum_temporal_per_hour, - maximum_temporal_per_hour, - ) - - # Backwards compatible properties (deprecated) - @property - def minimum_operation(self): - """DEPRECATED: Use 'minimum_temporal' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_temporal - - @minimum_operation.setter - def minimum_operation(self, value): - """DEPRECATED: Use 'minimum_temporal' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_temporal = value - - @property - def maximum_operation(self): - """DEPRECATED: Use 'maximum_temporal' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_temporal - - @maximum_operation.setter - def maximum_operation(self, value): - """DEPRECATED: Use 'maximum_temporal' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_temporal = value - - @property - def minimum_invest(self): - """DEPRECATED: Use 'minimum_nontemporal' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_nontemporal - - @minimum_invest.setter - def minimum_invest(self, value): - """DEPRECATED: Use 'minimum_nontemporal' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_nontemporal = value - - @property - def maximum_invest(self): - """DEPRECATED: Use 'maximum_nontemporal' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_nontemporal - - @maximum_invest.setter - def maximum_invest(self, value): - """DEPRECATED: Use 'maximum_nontemporal' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_nontemporal = value - - @property - def minimum_operation_per_hour(self): - """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_temporal_per_hour - - @minimum_operation_per_hour.setter - def minimum_operation_per_hour(self, value): - """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" - import warnings - - warnings.warn( - "Property 'minimum_operation_per_hour' is deprecated. 
Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_temporal_per_hour = value - - @property - def maximum_operation_per_hour(self): - """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_temporal_per_hour - - @maximum_operation_per_hour.setter - def maximum_operation_per_hour(self, value): - """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" - import warnings - - warnings.warn( - "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_temporal_per_hour = value + setattr(self, new_name, value) + return + super().__setattr__(name, value) def transform_data(self, flow_system: FlowSystem): self.minimum_temporal_per_hour = flow_system.create_time_series( diff --git a/flixopt/structure.py b/flixopt/structure.py index 12cd99c13..5ef87bfdf 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -660,3 +660,20 @@ def get_str_representation(data: Any, array_threshold: int = 50, decimals: int = console = Console(file=output_buffer, width=1000) # Adjust width as needed console.print(Pretty(formatted_data, expand_all=True, indent_guides=True)) return output_buffer.getvalue() + + +def handle_deprecated_param(kwargs, old_name, new_name, current_value): + """Helper to handle a single deprecated parameter.""" + import warnings + + old_value = kwargs.pop(old_name, None) + if old_value is not None: + warnings.warn( + f"Parameter '{old_name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=4, # Adjusted for call stack depth + ) + if current_value is not None: + raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') + return old_value + return current_value From 09bdeec2362f773af5e8e2bf11e2c6703d0b693b Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:11:06 +0200 Subject: [PATCH 12/27] Move handling to Interface class --- flixopt/effects.py | 31 +------------------------------ flixopt/structure.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index ac8492494..702ae3580 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -227,7 +227,7 @@ def _handle_deprecated_effect_params( # Process each deprecated parameter results = [] for old_name, new_name, current_value in deprecated_mappings: - new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) + new_value = handle_deprecated_param(kwargs, old_name, new_name, current_value) results.append(new_value) return tuple(results) @@ -241,35 +241,6 @@ def _handle_deprecated_effect_params( 'maximum_operation_per_hour': 'maximum_temporal_per_hour', } - def __getattr__(self, name): - # Handle deprecated properties - if name in self._DEPRECATED_PROPERTIES: - import warnings - - new_name = self._DEPRECATED_PROPERTIES[name] - warnings.warn( - f"Property '{name}' is deprecated. 
Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=2, - ) - return getattr(self, new_name) - raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") - - def __setattr__(self, name, value): - # Handle deprecated properties - if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): - import warnings - - new_name = self._DEPRECATED_PROPERTIES[name] - warnings.warn( - f"Property '{name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=2, - ) - setattr(self, new_name, value) - return - super().__setattr__(name, value) - def transform_data(self, flow_system: FlowSystem): self.minimum_temporal_per_hour = flow_system.create_time_series( f'{self.label_full}|minimum_temporal_per_hour', self.minimum_temporal_per_hour diff --git a/flixopt/structure.py b/flixopt/structure.py index 5ef87bfdf..4aa1a4b5a 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -115,6 +115,37 @@ class Interface: This class is used to collect arguments about a Model. Its the base class for all Elements and Models in flixopt. """ + _DEPRECATED_PROPERTIES = {} + + def __getattr__(self, name): + # Handle deprecated properties + if name in self._DEPRECATED_PROPERTIES: + import warnings + + new_name = self._DEPRECATED_PROPERTIES[name] + warnings.warn( + f"Property '{name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=2, + ) + return getattr(self, new_name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, name, value): + # Handle deprecated properties + if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): + import warnings + + new_name = self._DEPRECATED_PROPERTIES[name] + warnings.warn( + f"Property '{name}' is deprecated. 
Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=2, + ) + setattr(self, new_name, value) + return + super().__setattr__(name, value) + def transform_data(self, flow_system: FlowSystem): """Transforms the data of the interface to match the FlowSystem's dimensions""" raise NotImplementedError('Every Interface needs a transform_data() method') From 3c0db766a5770f5e19fe07459f46b44d8f8952ee Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:28:55 +0200 Subject: [PATCH 13/27] Getting lost --- flixopt/effects.py | 4 +-- flixopt/structure.py | 71 ++++++++++++++++++++++++++++++-------------- 2 files changed, 50 insertions(+), 25 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 702ae3580..a2ae4b1e3 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,7 +16,7 @@ from .core import NumericDataTS, Scalar, TimeSeries from .features import ShareAllocationModel -from .structure import Element, ElementModel, Model, SystemModel, handle_deprecated_param, register_class_for_io +from .structure import Element, ElementModel, Model, SystemModel, register_class_for_io if TYPE_CHECKING: from collections.abc import Iterator @@ -227,7 +227,7 @@ def _handle_deprecated_effect_params( # Process each deprecated parameter results = [] for old_name, new_name, current_value in deprecated_mappings: - new_value = handle_deprecated_param(kwargs, old_name, new_name, current_value) + new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) results.append(new_value) return tuple(results) diff --git a/flixopt/structure.py b/flixopt/structure.py index 4aa1a4b5a..2a4ffdc8f 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -116,9 +116,13 @@ class Interface: """ _DEPRECATED_PROPERTIES = {} + """Dictionary of deprecated properties and their new names.""" def __getattr__(self, name): - # Handle deprecated properties + """ + Handle deprecated property access. Custom getters might be defined by a method named + "_get_deprecated_()". This allows to apply transformations or add extra warnings/errors. + """ if name in self._DEPRECATED_PROPERTIES: import warnings @@ -128,24 +132,62 @@ def __getattr__(self, name): DeprecationWarning, stacklevel=2, ) - return getattr(self, new_name) + + # Check if subclass has custom transformation logic + custom_getter = getattr(self, f'_get_deprecated_{name}', None) + if custom_getter: + return custom_getter() + else: + # Default behavior: just get the new attribute + return getattr(self, new_name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") def __setattr__(self, name, value): - # Handle deprecated properties - if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): + """ + Handle deprecated property setting. Custom setters might be defined by a method named + "_set_deprecated_()". This allows to apply transformations or add extra warnings/errors. + """ + deprecated_props = getattr(self.__class__, '_DEPRECATED_PROPERTIES', {}) + + if name in deprecated_props: import warnings - new_name = self._DEPRECATED_PROPERTIES[name] + new_name = deprecated_props[name] warnings.warn( f"Property '{name}' is deprecated. 
Use '{new_name}' instead.", DeprecationWarning, stacklevel=2, ) - setattr(self, new_name, value) + + # Check if subclass has custom transformation logic + custom_setter = getattr(self, f'_set_deprecated_{name}', None) + if custom_setter: + custom_setter(value) + else: + # Default behavior: just set the new attribute + setattr(self, new_name, value) return + super().__setattr__(name, value) + @staticmethod + def handle_deprecated_param(kwargs, old_name, new_name, current_value): + """Helper to handle a single deprecated parameter.""" + import warnings + + old_value = kwargs.pop(old_name, None) + if old_value is not None: + warnings.warn( + f"Parameter '{old_name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=4, # Adjusted for call stack depth + ) + if current_value is not None: + raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') + return old_value + return current_value + def transform_data(self, flow_system: FlowSystem): """Transforms the data of the interface to match the FlowSystem's dimensions""" raise NotImplementedError('Every Interface needs a transform_data() method') @@ -691,20 +733,3 @@ def get_str_representation(data: Any, array_threshold: int = 50, decimals: int = console = Console(file=output_buffer, width=1000) # Adjust width as needed console.print(Pretty(formatted_data, expand_all=True, indent_guides=True)) return output_buffer.getvalue() - - -def handle_deprecated_param(kwargs, old_name, new_name, current_value): - """Helper to handle a single deprecated parameter.""" - import warnings - - old_value = kwargs.pop(old_name, None) - if old_value is not None: - warnings.warn( - f"Parameter '{old_name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=4, # Adjusted for call stack depth - ) - if current_value is not None: - raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') - return old_value - return current_value From 2f817d17bfcfbd39f411caaae7cb55b2ef292d66 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:37:39 +0200 Subject: [PATCH 14/27] Revert "Getting lost" This reverts commit 3c0db766a5770f5e19fe07459f46b44d8f8952ee. 
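
For context: the reverted commit had extended Interface with optional per-attribute
hooks, looked up as '_get_deprecated_<name>' / '_set_deprecated_<name>', so that
subclasses could customize how a deprecated attribute maps onto its replacement.
A rough, hypothetical sketch of that convention (illustrative names only):

    class MyElement(Interface):
        _DEPRECATED_PROPERTIES = {'old_limit': 'new_limit'}

        def _set_deprecated_old_limit(self, value):
            # custom transformation before storing under the new name
            self.new_limit = float(value)

The extra indirection is dropped again by this revert.
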
--- flixopt/effects.py | 4 +-- flixopt/structure.py | 71 ++++++++++++++------------------------------ 2 files changed, 25 insertions(+), 50 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index a2ae4b1e3..702ae3580 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,7 +16,7 @@ from .core import NumericDataTS, Scalar, TimeSeries from .features import ShareAllocationModel -from .structure import Element, ElementModel, Model, SystemModel, register_class_for_io +from .structure import Element, ElementModel, Model, SystemModel, handle_deprecated_param, register_class_for_io if TYPE_CHECKING: from collections.abc import Iterator @@ -227,7 +227,7 @@ def _handle_deprecated_effect_params( # Process each deprecated parameter results = [] for old_name, new_name, current_value in deprecated_mappings: - new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) + new_value = handle_deprecated_param(kwargs, old_name, new_name, current_value) results.append(new_value) return tuple(results) diff --git a/flixopt/structure.py b/flixopt/structure.py index 2a4ffdc8f..4aa1a4b5a 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -116,13 +116,9 @@ class Interface: """ _DEPRECATED_PROPERTIES = {} - """Dictionary of deprecated properties and their new names.""" def __getattr__(self, name): - """ - Handle deprecated property access. Custom getters might be defined by a method named - "_get_deprecated_()". This allows to apply transformations or add extra warnings/errors. - """ + # Handle deprecated properties if name in self._DEPRECATED_PROPERTIES: import warnings @@ -132,62 +128,24 @@ def __getattr__(self, name): DeprecationWarning, stacklevel=2, ) - - # Check if subclass has custom transformation logic - custom_getter = getattr(self, f'_get_deprecated_{name}', None) - if custom_getter: - return custom_getter() - else: - # Default behavior: just get the new attribute - return getattr(self, new_name) - + return getattr(self, new_name) raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") def __setattr__(self, name, value): - """ - Handle deprecated property setting. Custom setters might be defined by a method named - "_set_deprecated_()". This allows to apply transformations or add extra warnings/errors. - """ - deprecated_props = getattr(self.__class__, '_DEPRECATED_PROPERTIES', {}) - - if name in deprecated_props: + # Handle deprecated properties + if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): import warnings - new_name = deprecated_props[name] + new_name = self._DEPRECATED_PROPERTIES[name] warnings.warn( f"Property '{name}' is deprecated. Use '{new_name}' instead.", DeprecationWarning, stacklevel=2, ) - - # Check if subclass has custom transformation logic - custom_setter = getattr(self, f'_set_deprecated_{name}', None) - if custom_setter: - custom_setter(value) - else: - # Default behavior: just set the new attribute - setattr(self, new_name, value) + setattr(self, new_name, value) return - super().__setattr__(name, value) - @staticmethod - def handle_deprecated_param(kwargs, old_name, new_name, current_value): - """Helper to handle a single deprecated parameter.""" - import warnings - - old_value = kwargs.pop(old_name, None) - if old_value is not None: - warnings.warn( - f"Parameter '{old_name}' is deprecated. 
Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=4, # Adjusted for call stack depth - ) - if current_value is not None: - raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') - return old_value - return current_value - def transform_data(self, flow_system: FlowSystem): """Transforms the data of the interface to match the FlowSystem's dimensions""" raise NotImplementedError('Every Interface needs a transform_data() method') @@ -733,3 +691,20 @@ def get_str_representation(data: Any, array_threshold: int = 50, decimals: int = console = Console(file=output_buffer, width=1000) # Adjust width as needed console.print(Pretty(formatted_data, expand_all=True, indent_guides=True)) return output_buffer.getvalue() + + +def handle_deprecated_param(kwargs, old_name, new_name, current_value): + """Helper to handle a single deprecated parameter.""" + import warnings + + old_value = kwargs.pop(old_name, None) + if old_value is not None: + warnings.warn( + f"Parameter '{old_name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=4, # Adjusted for call stack depth + ) + if current_value is not None: + raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') + return old_value + return current_value From fb0af15ed67a43d8f9e8806c9d1e0e6458120db3 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:37:43 +0200 Subject: [PATCH 15/27] Revert "Move handling to Interface class" This reverts commit 09bdeec2362f773af5e8e2bf11e2c6703d0b693b. --- flixopt/effects.py | 31 ++++++++++++++++++++++++++++++- flixopt/structure.py | 31 ------------------------------- 2 files changed, 30 insertions(+), 32 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 702ae3580..ac8492494 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -227,7 +227,7 @@ def _handle_deprecated_effect_params( # Process each deprecated parameter results = [] for old_name, new_name, current_value in deprecated_mappings: - new_value = handle_deprecated_param(kwargs, old_name, new_name, current_value) + new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) results.append(new_value) return tuple(results) @@ -241,6 +241,35 @@ def _handle_deprecated_effect_params( 'maximum_operation_per_hour': 'maximum_temporal_per_hour', } + def __getattr__(self, name): + # Handle deprecated properties + if name in self._DEPRECATED_PROPERTIES: + import warnings + + new_name = self._DEPRECATED_PROPERTIES[name] + warnings.warn( + f"Property '{name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=2, + ) + return getattr(self, new_name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, name, value): + # Handle deprecated properties + if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): + import warnings + + new_name = self._DEPRECATED_PROPERTIES[name] + warnings.warn( + f"Property '{name}' is deprecated. 
Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=2, + ) + setattr(self, new_name, value) + return + super().__setattr__(name, value) + def transform_data(self, flow_system: FlowSystem): self.minimum_temporal_per_hour = flow_system.create_time_series( f'{self.label_full}|minimum_temporal_per_hour', self.minimum_temporal_per_hour diff --git a/flixopt/structure.py b/flixopt/structure.py index 4aa1a4b5a..5ef87bfdf 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -115,37 +115,6 @@ class Interface: This class is used to collect arguments about a Model. Its the base class for all Elements and Models in flixopt. """ - _DEPRECATED_PROPERTIES = {} - - def __getattr__(self, name): - # Handle deprecated properties - if name in self._DEPRECATED_PROPERTIES: - import warnings - - new_name = self._DEPRECATED_PROPERTIES[name] - warnings.warn( - f"Property '{name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=2, - ) - return getattr(self, new_name) - raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") - - def __setattr__(self, name, value): - # Handle deprecated properties - if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): - import warnings - - new_name = self._DEPRECATED_PROPERTIES[name] - warnings.warn( - f"Property '{name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=2, - ) - setattr(self, new_name, value) - return - super().__setattr__(name, value) - def transform_data(self, flow_system: FlowSystem): """Transforms the data of the interface to match the FlowSystem's dimensions""" raise NotImplementedError('Every Interface needs a transform_data() method') From 97b6295a0995aa9c8878252e552cc9cbcda6e4c6 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:37:46 +0200 Subject: [PATCH 16/27] Revert "Improce property handling" This reverts commit 5fe2c6461a56cb9c68c5b419e7db2d00f443ff1a. 
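
This restores the explicit deprecated property pairs on Effect instead of the
_DEPRECATED_PROPERTIES dict with __getattr__/__setattr__. Either way the
user-facing behaviour is meant to be identical: reading or writing the old
attribute warns and proxies to the new one. Rough sketch (assuming the usual
Effect(label, unit, description) signature and flixopt imported as fx):

    import warnings
    import flixopt as fx

    effect = fx.Effect('costs', '€', 'Kosten')
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        effect.minimum_operation = 5           # emits DeprecationWarning
    assert effect.minimum_temporal == 5        # value lands on the new name
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
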
--- flixopt/effects.py | 271 +++++++++++++++++++++++++++++++++++-------- flixopt/structure.py | 17 --- 2 files changed, 223 insertions(+), 65 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index ac8492494..e8236a3b0 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,7 +16,7 @@ from .core import NumericDataTS, Scalar, TimeSeries from .features import ShareAllocationModel -from .structure import Element, ElementModel, Model, SystemModel, handle_deprecated_param, register_class_for_io +from .structure import Element, ElementModel, Model, SystemModel, register_class_for_io if TYPE_CHECKING: from collections.abc import Iterator @@ -213,62 +213,237 @@ def _handle_deprecated_effect_params( maximum_temporal_per_hour, ): """Handle deprecated parameter names for Effect class.""" + import warnings - # Define the mappings: (old_name, new_name, current_value) - deprecated_mappings = [ - ('minimum_operation', 'minimum_temporal', minimum_temporal), - ('maximum_operation', 'maximum_temporal', maximum_temporal), - ('minimum_invest', 'minimum_nontemporal', minimum_nontemporal), - ('maximum_invest', 'maximum_nontemporal', maximum_nontemporal), - ('minimum_operation_per_hour', 'minimum_temporal_per_hour', minimum_temporal_per_hour), - ('maximum_operation_per_hour', 'maximum_temporal_per_hour', maximum_temporal_per_hour), - ] - - # Process each deprecated parameter - results = [] - for old_name, new_name, current_value in deprecated_mappings: - new_value = self._handle_deprecated_param(kwargs, old_name, new_name, current_value) - results.append(new_value) - - return tuple(results) - - _DEPRECATED_PROPERTIES = { - 'minimum_operation': 'minimum_temporal', - 'maximum_operation': 'maximum_temporal', - 'minimum_invest': 'minimum_nontemporal', - 'maximum_invest': 'maximum_nontemporal', - 'minimum_operation_per_hour': 'minimum_temporal_per_hour', - 'maximum_operation_per_hour': 'maximum_temporal_per_hour', - } - - def __getattr__(self, name): - # Handle deprecated properties - if name in self._DEPRECATED_PROPERTIES: - import warnings - - new_name = self._DEPRECATED_PROPERTIES[name] + # Handle minimum_operation -> minimum_temporal + minimum_operation = kwargs.pop('minimum_operation', None) + if minimum_operation is not None: warnings.warn( - f"Property '{name}' is deprecated. Use '{new_name}' instead.", + "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", DeprecationWarning, - stacklevel=2, + stacklevel=3, ) - return getattr(self, new_name) - raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + if minimum_temporal is not None: + raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') + minimum_temporal = minimum_operation - def __setattr__(self, name, value): - # Handle deprecated properties - if name in getattr(self, '_DEPRECATED_PROPERTIES', {}): - import warnings + # Handle maximum_operation -> maximum_temporal + maximum_operation = kwargs.pop('maximum_operation', None) + if maximum_operation is not None: + warnings.warn( + "Parameter 'maximum_operation' is deprecated. 
Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if maximum_temporal is not None: + raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') + maximum_temporal = maximum_operation + + # Handle minimum_invest -> minimum_nontemporal + minimum_invest = kwargs.pop('minimum_invest', None) + if minimum_invest is not None: + warnings.warn( + "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if minimum_nontemporal is not None: + raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') + minimum_nontemporal = minimum_invest + + # Handle maximum_invest -> maximum_nontemporal + maximum_invest = kwargs.pop('maximum_invest', None) + if maximum_invest is not None: + warnings.warn( + "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=3, + ) + if maximum_nontemporal is not None: + raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') + maximum_nontemporal = maximum_invest + + # Handle minimum_operation_per_hour -> minimum_temporal_per_hour + minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) + if minimum_operation_per_hour is not None: + warnings.warn( + "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=3, + ) + if minimum_temporal_per_hour is not None: + raise ValueError( + 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' + ) + minimum_temporal_per_hour = minimum_operation_per_hour - new_name = self._DEPRECATED_PROPERTIES[name] + # Handle maximum_operation_per_hour -> maximum_temporal_per_hour + maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) + if maximum_operation_per_hour is not None: warnings.warn( - f"Property '{name}' is deprecated. Use '{new_name}' instead.", + "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", DeprecationWarning, - stacklevel=2, + stacklevel=3, ) - setattr(self, new_name, value) - return - super().__setattr__(name, value) + if maximum_temporal_per_hour is not None: + raise ValueError( + 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' + ) + maximum_temporal_per_hour = maximum_operation_per_hour + + return ( + minimum_temporal, + maximum_temporal, + minimum_nontemporal, + maximum_nontemporal, + minimum_temporal_per_hour, + maximum_temporal_per_hour, + ) + + # Backwards compatible properties (deprecated) + @property + def minimum_operation(self): + """DEPRECATED: Use 'minimum_temporal' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.minimum_temporal + + @minimum_operation.setter + def minimum_operation(self, value): + """DEPRECATED: Use 'minimum_temporal' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.minimum_temporal = value + + @property + def maximum_operation(self): + """DEPRECATED: Use 'maximum_temporal' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation' is deprecated. 
Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.maximum_temporal + + @maximum_operation.setter + def maximum_operation(self, value): + """DEPRECATED: Use 'maximum_temporal' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.maximum_temporal = value + + @property + def minimum_invest(self): + """DEPRECATED: Use 'minimum_nontemporal' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.minimum_nontemporal + + @minimum_invest.setter + def minimum_invest(self, value): + """DEPRECATED: Use 'minimum_nontemporal' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.minimum_nontemporal = value + + @property + def maximum_invest(self): + """DEPRECATED: Use 'maximum_nontemporal' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.maximum_nontemporal + + @maximum_invest.setter + def maximum_invest(self, value): + """DEPRECATED: Use 'maximum_nontemporal' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.maximum_nontemporal = value + + @property + def minimum_operation_per_hour(self): + """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.minimum_temporal_per_hour + + @minimum_operation_per_hour.setter + def minimum_operation_per_hour(self, value): + """DEPRECATED: Use 'minimum_temporal_per_hour' property instead.""" + import warnings + + warnings.warn( + "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.minimum_temporal_per_hour = value + + @property + def maximum_operation_per_hour(self): + """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.maximum_temporal_per_hour + + @maximum_operation_per_hour.setter + def maximum_operation_per_hour(self, value): + """DEPRECATED: Use 'maximum_temporal_per_hour' property instead.""" + import warnings + + warnings.warn( + "Property 'maximum_operation_per_hour' is deprecated. 
Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.maximum_temporal_per_hour = value def transform_data(self, flow_system: FlowSystem): self.minimum_temporal_per_hour = flow_system.create_time_series( diff --git a/flixopt/structure.py b/flixopt/structure.py index 5ef87bfdf..12cd99c13 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -660,20 +660,3 @@ def get_str_representation(data: Any, array_threshold: int = 50, decimals: int = console = Console(file=output_buffer, width=1000) # Adjust width as needed console.print(Pretty(formatted_data, expand_all=True, indent_guides=True)) return output_buffer.getvalue() - - -def handle_deprecated_param(kwargs, old_name, new_name, current_value): - """Helper to handle a single deprecated parameter.""" - import warnings - - old_value = kwargs.pop(old_name, None) - if old_value is not None: - warnings.warn( - f"Parameter '{old_name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=4, # Adjusted for call stack depth - ) - if current_value is not None: - raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.') - return old_value - return current_value From 2cc5e4c62243839c62de4769e8803dfacfc85684 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:37:51 +0200 Subject: [PATCH 17/27] Revert "Move handlign from centralized back to classes in a dedicated method" This reverts commit 9f4c1f6acddd50b60199fd67ea6be55491752faa. --- flixopt/components.py | 151 +++++++++++++++++++----------------------- flixopt/effects.py | 143 +++++++-------------------------------- flixopt/structure.py | 43 ++++++++++++ 3 files changed, 133 insertions(+), 204 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 26c49e489..1cfc15d9d 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -970,10 +970,39 @@ def __init__( meta_data: dict | None = None, **kwargs, ): - # Handle deprecated parameters - inputs, outputs, prevent_simultaneous_flow_rates = self._handle_deprecated_source_and_sink_params( - kwargs, inputs, outputs, prevent_simultaneous_flow_rates - ) + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = { + 'source': 'outputs', + 'sink': 'inputs', + 'prevent_simultaneous_sink_and_source': 'prevent_simultaneous_flow_rates', + } + + # Set attribute values to allow conflict checking + self._temp_outputs = outputs + self._temp_inputs = inputs + self._temp_prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update parameters with deprecated values if provided, with special handling for list parameters + if 'outputs' in updated_params: + if outputs is not None: + raise ValueError('Either source or outputs can be specified, but not both.') + outputs = [updated_params['outputs']] + + if 'inputs' in updated_params: + if inputs is not None: + raise ValueError('Either sink or inputs can be specified, but not both.') + inputs = [updated_params['inputs']] + + if 'prevent_simultaneous_flow_rates' in updated_params: + prevent_simultaneous_flow_rates = updated_params['prevent_simultaneous_flow_rates'] + + # Clean up temporary attributes + delattr(self, '_temp_outputs') + delattr(self, '_temp_inputs') + delattr(self, '_temp_prevent_simultaneous_flow_rates') # Validate any remaining unexpected kwargs 
self._validate_kwargs(kwargs) @@ -987,46 +1016,6 @@ def __init__( ) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates - def _handle_deprecated_source_and_sink_params(self, kwargs, inputs, outputs, prevent_simultaneous_flow_rates): - """Handle deprecated parameter names for SourceAndSink class.""" - import warnings - - # Handle deprecated 'source' parameter - source = kwargs.pop('source', None) - if source is not None: - warnings.warn( - 'The use of the source argument is deprecated. Use the outputs argument instead.', - DeprecationWarning, - stacklevel=3, - ) - if outputs is not None: - raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [source] - - # Handle deprecated 'sink' parameter - sink = kwargs.pop('sink', None) - if sink is not None: - warnings.warn( - 'The use of the sink argument is deprecated. Use the inputs argument instead.', - DeprecationWarning, - stacklevel=3, - ) - if inputs is not None: - raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [sink] - - # Handle deprecated 'prevent_simultaneous_sink_and_source' parameter - prevent_simultaneous_sink_and_source = kwargs.pop('prevent_simultaneous_sink_and_source', None) - if prevent_simultaneous_sink_and_source is not None: - warnings.warn( - 'The use of the prevent_simultaneous_sink_and_source argument is deprecated. Use the prevent_simultaneous_flow_rates argument instead.', - DeprecationWarning, - stacklevel=3, - ) - prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source - - return inputs, outputs, prevent_simultaneous_flow_rates - @property def source(self) -> Flow: warnings.warn( @@ -1139,8 +1128,23 @@ def __init__( prevent_simultaneous_flow_rates: bool = False, **kwargs, ): - # Handle deprecated parameters - outputs = self._handle_deprecated_source_params(kwargs, outputs) + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = {'source': 'outputs'} + + # Set attribute to allow conflict checking + self._temp_outputs = outputs + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update parameters with deprecated values if provided + if 'outputs' in updated_params: + if outputs is not None: + raise ValueError('Either source or outputs can be specified, but not both.') + outputs = [updated_params['outputs']] + + # Clean up temporary attribute + delattr(self, '_temp_outputs') # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1153,24 +1157,6 @@ def __init__( prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None, ) - def _handle_deprecated_source_params(self, kwargs, outputs): - """Handle deprecated parameter names for Source class.""" - import warnings - - # Handle deprecated 'source' parameter - source = kwargs.pop('source', None) - if source is not None: - warnings.warn( - 'The use of the source argument is deprecated. Use the outputs argument instead.', - DeprecationWarning, - stacklevel=3, - ) - if outputs is not None: - raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [source] - - return outputs - @property def source(self) -> Flow: warnings.warn( @@ -1280,8 +1266,23 @@ def __init__( Note: The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases. 
""" - # Handle deprecated parameters - inputs = self._handle_deprecated_sink_params(kwargs, inputs) + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = {'sink': 'inputs'} + + # Set attribute to allow conflict checking + self._temp_inputs = inputs + + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) + + # Update parameters with deprecated values if provided + if 'inputs' in updated_params: + if inputs is not None: + raise ValueError('Either sink or inputs can be specified, but not both.') + inputs = [updated_params['inputs']] + + # Clean up temporary attribute + delattr(self, '_temp_inputs') # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1294,24 +1295,6 @@ def __init__( prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None, ) - def _handle_deprecated_sink_params(self, kwargs, inputs): - """Handle deprecated parameter names for Sink class.""" - import warnings - - # Handle deprecated 'sink' parameter - sink = kwargs.pop('sink', None) - if sink is not None: - warnings.warn( - 'The use of the sink argument is deprecated. Use the inputs argument instead.', - DeprecationWarning, - stacklevel=3, - ) - if inputs is not None: - raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [sink] - - return inputs - @property def sink(self) -> Flow: warnings.warn( diff --git a/flixopt/effects.py b/flixopt/effects.py index e8236a3b0..5c5059901 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -171,134 +171,37 @@ def __init__( ) self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} - # Handle deprecated parameters - ( - minimum_temporal, - maximum_temporal, - minimum_nontemporal, - maximum_nontemporal, - minimum_temporal_per_hour, - maximum_temporal_per_hour, - ) = self._handle_deprecated_effect_params( - kwargs, - minimum_temporal, - maximum_temporal, - minimum_nontemporal, - maximum_nontemporal, - minimum_temporal_per_hour, - maximum_temporal_per_hour, - ) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - - # Set attributes + # Handle backwards compatibility for deprecated parameters + deprecated_mappings = { + 'minimum_operation': 'minimum_temporal', + 'maximum_operation': 'maximum_temporal', + 'minimum_invest': 'minimum_nontemporal', + 'maximum_invest': 'maximum_nontemporal', + 'minimum_operation_per_hour': 'minimum_temporal_per_hour', + 'maximum_operation_per_hour': 'maximum_temporal_per_hour', + } + + # Set attribute values before calling _handle_deprecated_params to allow conflict checking self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_nontemporal = minimum_nontemporal self.maximum_nontemporal = maximum_nontemporal self.minimum_temporal_per_hour = minimum_temporal_per_hour self.maximum_temporal_per_hour = maximum_temporal_per_hour - self.minimum_total = minimum_total - self.maximum_total = maximum_total - def _handle_deprecated_effect_params( - self, - kwargs, - minimum_temporal, - maximum_temporal, - minimum_nontemporal, - maximum_nontemporal, - minimum_temporal_per_hour, - maximum_temporal_per_hour, - ): - """Handle deprecated parameter names for Effect class.""" - import warnings + # Handle deprecated parameters using centralized method + updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - # Handle minimum_operation -> 
minimum_temporal - minimum_operation = kwargs.pop('minimum_operation', None) - if minimum_operation is not None: - warnings.warn( - "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if minimum_temporal is not None: - raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') - minimum_temporal = minimum_operation - - # Handle maximum_operation -> maximum_temporal - maximum_operation = kwargs.pop('maximum_operation', None) - if maximum_operation is not None: - warnings.warn( - "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if maximum_temporal is not None: - raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') - maximum_temporal = maximum_operation - - # Handle minimum_invest -> minimum_nontemporal - minimum_invest = kwargs.pop('minimum_invest', None) - if minimum_invest is not None: - warnings.warn( - "Parameter 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if minimum_nontemporal is not None: - raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') - minimum_nontemporal = minimum_invest - - # Handle maximum_invest -> maximum_nontemporal - maximum_invest = kwargs.pop('maximum_invest', None) - if maximum_invest is not None: - warnings.warn( - "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", - DeprecationWarning, - stacklevel=3, - ) - if maximum_nontemporal is not None: - raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') - maximum_nontemporal = maximum_invest - - # Handle minimum_operation_per_hour -> minimum_temporal_per_hour - minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) - if minimum_operation_per_hour is not None: - warnings.warn( - "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=3, - ) - if minimum_temporal_per_hour is not None: - raise ValueError( - 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' - ) - minimum_temporal_per_hour = minimum_operation_per_hour - - # Handle maximum_operation_per_hour -> maximum_temporal_per_hour - maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) - if maximum_operation_per_hour is not None: - warnings.warn( - "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", - DeprecationWarning, - stacklevel=3, - ) - if maximum_temporal_per_hour is not None: - raise ValueError( - 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' 
- ) - maximum_temporal_per_hour = maximum_operation_per_hour - - return ( - minimum_temporal, - maximum_temporal, - minimum_nontemporal, - maximum_nontemporal, - minimum_temporal_per_hour, - maximum_temporal_per_hour, - ) + # Update attributes with deprecated values if provided + for param_name, param_value in updated_params.items(): + setattr(self, param_name, param_value) + + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) + + # Set remaining attributes + self.minimum_total = minimum_total + self.maximum_total = maximum_total # Backwards compatible properties (deprecated) @property diff --git a/flixopt/structure.py b/flixopt/structure.py index 12cd99c13..664e52107 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -232,6 +232,49 @@ def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None: unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}') + def _handle_deprecated_params(self, kwargs: dict, deprecated_mappings: dict) -> dict: + """ + Handle deprecated parameters by extracting them from kwargs and issuing warnings. + + This method centralizes the deprecated parameter handling pattern used across classes. + It extracts deprecated parameters, issues deprecation warnings, checks for conflicts, + and returns the updated parameter values. + + Args: + kwargs: Dictionary of keyword arguments that may contain deprecated parameters + deprecated_mappings: Dictionary mapping deprecated parameter names to new parameter names + Format: {'deprecated_name': 'new_name'} + + Returns: + Dictionary with new parameter names as keys and their values + + Raises: + ValueError: If both deprecated and new parameters are specified simultaneously + """ + import warnings + + updated_params = {} + + for deprecated_name, new_name in deprecated_mappings.items(): + deprecated_value = kwargs.pop(deprecated_name, None) + + if deprecated_value is not None: + # Issue deprecation warning + warnings.warn( + f"Parameter '{deprecated_name}' is deprecated. Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=3, # Skip this method and the calling method to point to user code + ) + + # Check if the new parameter is already set + current_value = getattr(self, new_name, None) + if current_value is not None: + raise ValueError(f"Either '{deprecated_name}' or '{new_name}' can be specified, but not both.") + + updated_params[new_name] = deprecated_value + + return updated_params + @classmethod def _deserialize_dict(cls, data: dict) -> dict | Interface: if '__class__' in data: From 51bbe4ec67d518f21c3cc2e49b0b92b23cba8e39 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:37:59 +0200 Subject: [PATCH 18/27] Revert "centralize logic for deprectaed params" This reverts commit 4a8257422e94dbdfe3c4249ad70e4642e79122a1. 
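
Back to handling the deprecated keyword arguments inline in each __init__
(Effect, Source, Sink, SourceAndSink) rather than through a shared helper.
The constructor contract stays the same: old keywords still work but emit a
DeprecationWarning, and passing both the old and the new spelling raises a
ValueError. Rough sketch (assuming flixopt imported as fx):

    import warnings
    import flixopt as fx

    with warnings.catch_warnings(record=True):
        warnings.simplefilter('always')
        co2 = fx.Effect('CO2', 'kg', 'emissions',
                        maximum_operation_per_hour=1000)  # warns
    assert co2.maximum_temporal_per_hour == 1000

    # Mixing both spellings is rejected:
    # fx.Effect('CO2', 'kg', 'emissions',
    #           maximum_operation_per_hour=1000,
    #           maximum_temporal_per_hour=1000)   -> ValueError
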
--- flixopt/components.py | 97 ++++++++++++++++---------------------- flixopt/effects.py | 106 +++++++++++++++++++++++++++++++++--------- flixopt/structure.py | 43 ----------------- 3 files changed, 124 insertions(+), 122 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 1cfc15d9d..3bb435cb1 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -970,39 +970,36 @@ def __init__( meta_data: dict | None = None, **kwargs, ): - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = { - 'source': 'outputs', - 'sink': 'inputs', - 'prevent_simultaneous_sink_and_source': 'prevent_simultaneous_flow_rates', - } - - # Set attribute values to allow conflict checking - self._temp_outputs = outputs - self._temp_inputs = inputs - self._temp_prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided, with special handling for list parameters - if 'outputs' in updated_params: + source = kwargs.pop('source', None) + sink = kwargs.pop('sink', None) + prevent_simultaneous_sink_and_source = kwargs.pop('prevent_simultaneous_sink_and_source', None) + if source is not None: + warnings.warn( + 'The use of the source argument is deprecated. Use the outputs argument instead.', + DeprecationWarning, + stacklevel=2, + ) if outputs is not None: raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [updated_params['outputs']] + outputs = [source] - if 'inputs' in updated_params: + if sink is not None: + warnings.warn( + 'The use of the sink argument is deprecated. Use the inputs argument instead.', + DeprecationWarning, + stacklevel=2, + ) if inputs is not None: raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [updated_params['inputs']] + inputs = [sink] - if 'prevent_simultaneous_flow_rates' in updated_params: - prevent_simultaneous_flow_rates = updated_params['prevent_simultaneous_flow_rates'] - - # Clean up temporary attributes - delattr(self, '_temp_outputs') - delattr(self, '_temp_inputs') - delattr(self, '_temp_prevent_simultaneous_flow_rates') + if prevent_simultaneous_sink_and_source is not None: + warnings.warn( + 'The use of the prevent_simultaneous_sink_and_source argument is deprecated. Use the prevent_simultaneous_flow_rates argument instead.', + DeprecationWarning, + stacklevel=2, + ) + prevent_simultaneous_flow_rates = prevent_simultaneous_sink_and_source # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1128,23 +1125,16 @@ def __init__( prevent_simultaneous_flow_rates: bool = False, **kwargs, ): - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = {'source': 'outputs'} - - # Set attribute to allow conflict checking - self._temp_outputs = outputs - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided - if 'outputs' in updated_params: + source = kwargs.pop('source', None) + if source is not None: + warnings.warn( + 'The use of the source argument is deprecated. 
Use the outputs argument instead.', + DeprecationWarning, + stacklevel=2, + ) if outputs is not None: raise ValueError('Either source or outputs can be specified, but not both.') - outputs = [updated_params['outputs']] - - # Clean up temporary attribute - delattr(self, '_temp_outputs') + outputs = [source] # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) @@ -1266,23 +1256,16 @@ def __init__( Note: The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases. """ - # Handle backwards compatibility for deprecated parameters - deprecated_mappings = {'sink': 'inputs'} - - # Set attribute to allow conflict checking - self._temp_inputs = inputs - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update parameters with deprecated values if provided - if 'inputs' in updated_params: + sink = kwargs.pop('sink', None) + if sink is not None: + warnings.warn( + 'The use of the sink argument is deprecated. Use the inputs argument instead.', + DeprecationWarning, + stacklevel=2, + ) if inputs is not None: raise ValueError('Either sink or inputs can be specified, but not both.') - inputs = [updated_params['inputs']] - - # Clean up temporary attribute - delattr(self, '_temp_inputs') + inputs = [sink] # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) diff --git a/flixopt/effects.py b/flixopt/effects.py index 5c5059901..3e404ec65 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -172,34 +172,96 @@ def __init__( self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {} # Handle backwards compatibility for deprecated parameters - deprecated_mappings = { - 'minimum_operation': 'minimum_temporal', - 'maximum_operation': 'maximum_temporal', - 'minimum_invest': 'minimum_nontemporal', - 'maximum_invest': 'maximum_nontemporal', - 'minimum_operation_per_hour': 'minimum_temporal_per_hour', - 'maximum_operation_per_hour': 'maximum_temporal_per_hour', - } - - # Set attribute values before calling _handle_deprecated_params to allow conflict checking + import warnings + + # Extract deprecated parameters from kwargs + minimum_operation = kwargs.pop('minimum_operation', None) + maximum_operation = kwargs.pop('maximum_operation', None) + minimum_invest = kwargs.pop('minimum_invest', None) + maximum_invest = kwargs.pop('maximum_invest', None) + minimum_operation_per_hour = kwargs.pop('minimum_operation_per_hour', None) + maximum_operation_per_hour = kwargs.pop('maximum_operation_per_hour', None) + + # Handle minimum_temporal + if minimum_operation is not None: + warnings.warn( + "Parameter 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_temporal is not None: + raise ValueError('Either minimum_operation or minimum_temporal can be specified, but not both.') + minimum_temporal = minimum_operation + + # Handle maximum_temporal + if maximum_operation is not None: + warnings.warn( + "Parameter 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_temporal is not None: + raise ValueError('Either maximum_operation or maximum_temporal can be specified, but not both.') + maximum_temporal = maximum_operation + + # Handle minimum_nontemporal + if minimum_invest is not None: + warnings.warn( + "Parameter 'minimum_invest' is deprecated. 
Use 'minimum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_nontemporal is not None: + raise ValueError('Either minimum_invest or minimum_nontemporal can be specified, but not both.') + minimum_nontemporal = minimum_invest + + # Handle maximum_nontemporal + if maximum_invest is not None: + warnings.warn( + "Parameter 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_nontemporal is not None: + raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.') + maximum_nontemporal = maximum_invest + + # Handle minimum_temporal_per_hour + if minimum_operation_per_hour is not None: + warnings.warn( + "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + if minimum_temporal_per_hour is not None: + raise ValueError( + 'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.' + ) + minimum_temporal_per_hour = minimum_operation_per_hour + + # Handle maximum_temporal_per_hour + if maximum_operation_per_hour is not None: + warnings.warn( + "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.", + DeprecationWarning, + stacklevel=2, + ) + if maximum_temporal_per_hour is not None: + raise ValueError( + 'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.' + ) + maximum_temporal_per_hour = maximum_operation_per_hour + + # Validate any remaining unexpected kwargs + self._validate_kwargs(kwargs) + + # Set attributes directly self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal self.minimum_nontemporal = minimum_nontemporal self.maximum_nontemporal = maximum_nontemporal self.minimum_temporal_per_hour = minimum_temporal_per_hour self.maximum_temporal_per_hour = maximum_temporal_per_hour - - # Handle deprecated parameters using centralized method - updated_params = self._handle_deprecated_params(kwargs, deprecated_mappings) - - # Update attributes with deprecated values if provided - for param_name, param_value in updated_params.items(): - setattr(self, param_name, param_value) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - - # Set remaining attributes self.minimum_total = minimum_total self.maximum_total = maximum_total diff --git a/flixopt/structure.py b/flixopt/structure.py index 664e52107..12cd99c13 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -232,49 +232,6 @@ def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None: unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys()) raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}') - def _handle_deprecated_params(self, kwargs: dict, deprecated_mappings: dict) -> dict: - """ - Handle deprecated parameters by extracting them from kwargs and issuing warnings. - - This method centralizes the deprecated parameter handling pattern used across classes. - It extracts deprecated parameters, issues deprecation warnings, checks for conflicts, - and returns the updated parameter values. 
- - Args: - kwargs: Dictionary of keyword arguments that may contain deprecated parameters - deprecated_mappings: Dictionary mapping deprecated parameter names to new parameter names - Format: {'deprecated_name': 'new_name'} - - Returns: - Dictionary with new parameter names as keys and their values - - Raises: - ValueError: If both deprecated and new parameters are specified simultaneously - """ - import warnings - - updated_params = {} - - for deprecated_name, new_name in deprecated_mappings.items(): - deprecated_value = kwargs.pop(deprecated_name, None) - - if deprecated_value is not None: - # Issue deprecation warning - warnings.warn( - f"Parameter '{deprecated_name}' is deprecated. Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=3, # Skip this method and the calling method to point to user code - ) - - # Check if the new parameter is already set - current_value = getattr(self, new_name, None) - if current_value is not None: - raise ValueError(f"Either '{deprecated_name}' or '{new_name}' can be specified, but not both.") - - updated_params[new_name] = deprecated_value - - return updated_params - @classmethod def _deserialize_dict(cls, data: dict) -> dict | Interface: if '__class__' in data: From 4705a4f5c331f0506c7eef782651dba246e8ee1c Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:41:35 +0200 Subject: [PATCH 19/27] Add "" to warnings --- flixopt/components.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 3bb435cb1..2f60f2759 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -975,7 +975,7 @@ def __init__( prevent_simultaneous_sink_and_source = kwargs.pop('prevent_simultaneous_sink_and_source', None) if source is not None: warnings.warn( - 'The use of the source argument is deprecated. Use the outputs argument instead.', + 'The use of the "source" argument is deprecated. Use the "outputs" argument instead.', DeprecationWarning, stacklevel=2, ) @@ -985,7 +985,7 @@ def __init__( if sink is not None: warnings.warn( - 'The use of the sink argument is deprecated. Use the inputs argument instead.', + 'The use of the "sink" argument is deprecated. Use the "inputs" argument instead.', DeprecationWarning, stacklevel=2, ) @@ -995,7 +995,7 @@ def __init__( if prevent_simultaneous_sink_and_source is not None: warnings.warn( - 'The use of the prevent_simultaneous_sink_and_source argument is deprecated. Use the prevent_simultaneous_flow_rates argument instead.', + 'The use of the "prevent_simultaneous_sink_and_source" argument is deprecated. Use the "prevent_simultaneous_flow_rates" argument instead.', DeprecationWarning, stacklevel=2, ) @@ -1128,7 +1128,7 @@ def __init__( source = kwargs.pop('source', None) if source is not None: warnings.warn( - 'The use of the source argument is deprecated. Use the outputs argument instead.', + 'The use of the "source" argument is deprecated. Use the "outputs" argument instead.', DeprecationWarning, stacklevel=2, ) @@ -1259,7 +1259,7 @@ def __init__( sink = kwargs.pop('sink', None) if sink is not None: warnings.warn( - 'The use of the sink argument is deprecated. Use the inputs argument instead.', + 'The use of the "sink" argument is deprecated. 
Use the "inputs" argument instead.', DeprecationWarning, stacklevel=2, ) From 7dc51aa0fbeb562a64f3e4f73814bf9f351928e6 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:45:25 +0200 Subject: [PATCH 20/27] Revert change in examples --- examples/01_Simple/simple_example.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/01_Simple/simple_example.py b/examples/01_Simple/simple_example.py index 626109344..8239f805a 100644 --- a/examples/01_Simple/simple_example.py +++ b/examples/01_Simple/simple_example.py @@ -37,7 +37,7 @@ unit='kg', description='CO2_e-Emissionen', specific_share_to_other_effects_operation={costs.label: 0.2}, - maximum_temporal_per_hour=1000, # Max CO2 emissions per hour + maximum_operation_per_hour=1000, # Max CO2 emissions per hour ) # --- Define Flow System Components --- From b4d5e8b9e5ef48687809a2f4d4319be3e026dd70 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:54:47 +0200 Subject: [PATCH 21/27] Improve BackwardsCompatibleDataset --- flixopt/results.py | 173 +++++++++++++++++++++++++++++++++++++-------- 1 file changed, 144 insertions(+), 29 deletions(-) diff --git a/flixopt/results.py b/flixopt/results.py index 0a123783f..f355ea275 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -29,41 +29,113 @@ class BackwardsCompatibleDataset: - """Wrapper around xarray.Dataset to provide backwards compatibility for renamed variables.""" + """Wrapper around xarray.Dataset to provide backwards compatibility for renamed variables. - # Mapping from old variable names to new variable names + This class handles multiple types of backwards compatibility: + 1. Direct variable name mappings (e.g., 'costs|total' -> 'costs') + 2. Pattern-based substitutions (e.g., (operation) -> (temporal)) + 3. Dynamic renaming rules for complex variable naming schemes + """ + + # Direct mapping from old variable names to new variable names DEPRECATED_VARIABLE_MAPPING = { # Effect variable names 'costs|total': 'costs', - # Cross-effect variable names (operation -> temporal, invest -> nontemporal) - # This will be handled dynamically in __getitem__ } + # Pattern-based substitution rules (old_pattern, new_pattern, condition_func) + PATTERN_SUBSTITUTION_RULES = [ + # Cross-effect variables: (operation) -> (temporal), (invest) -> (nontemporal) + ('(operation)', '(temporal)', lambda key: '->' in key), + ('(invest)', '(nontemporal)', lambda key: '->' in key), + ] + + # Regex-based renaming patterns for more complex cases + @staticmethod + def _get_regex_patterns(): + """Get regex patterns for complex variable renaming. + + Returns: + List of tuples (pattern, replacement, description) for regex-based renaming. 
+ """ + import re + + return [ + # Pattern for effect parameter names: *_operation -> *_temporal + (re.compile(r'(.+)_operation($|_)'), r'\1_temporal\2', 'operation suffix to temporal'), + # Pattern for effect parameter names: *_invest -> *_nontemporal + (re.compile(r'(.+)_invest($|_)'), r'\1_nontemporal\2', 'invest suffix to nontemporal'), + # Pattern for per-hour variants: *_operation_per_hour -> *_temporal_per_hour + ( + re.compile(r'(.+)_operation_per_hour'), + r'\1_temporal_per_hour', + 'operation_per_hour to temporal_per_hour', + ), + ] + def __init__(self, dataset: xr.Dataset): self._dataset = dataset + # Cache for resolved mappings to improve performance + self._mapping_cache = {} - def __getitem__(self, key): - """Access dataset variables with backwards compatibility.""" - # Handle direct mapping first + def _resolve_deprecated_key(self, key): + """Resolve a deprecated variable name to its current equivalent. + + Args: + key: Variable name to resolve + + Returns: + Tuple of (new_key, found) where found indicates if a mapping was found + """ + # Check cache first + if key in self._mapping_cache: + return self._mapping_cache[key], True + + # 1. Direct mapping if key in self.DEPRECATED_VARIABLE_MAPPING: new_key = self.DEPRECATED_VARIABLE_MAPPING[key] + if new_key in self._dataset: + self._mapping_cache[key] = new_key + return new_key, True + + # 2. Pattern-based substitution + for old_pattern, new_pattern, condition_func in self.PATTERN_SUBSTITUTION_RULES: + if condition_func(key) and old_pattern in key: + new_key = key.replace(old_pattern, new_pattern) + if new_key in self._dataset: + self._mapping_cache[key] = new_key + return new_key, True + + # 3. Regex-based patterns + for pattern, replacement, _description in self._get_regex_patterns(): + if pattern.search(key): + new_key = pattern.sub(replacement, key) + if new_key in self._dataset: + self._mapping_cache[key] = new_key + return new_key, True + + # 4. Check if key exists as-is in dataset + if key in self._dataset: + return key, True + + return key, False + + def __getitem__(self, key): + """Access dataset variables with backwards compatibility.""" + new_key, found = self._resolve_deprecated_key(key) + + if found and new_key != key: + # Issue deprecation warning for renamed variables warnings.warn( f"Variable name '{key}' is deprecated. Use '{new_key}' instead.", DeprecationWarning, stacklevel=2 ) return self._dataset[new_key] - - # Handle cross-effect variables dynamically - if '->' in key and ('(operation)' in key or '(invest)' in key): - # Replace (operation) -> (temporal) and (invest) -> (nontemporal) - new_key = key.replace('(operation)', '(temporal)').replace('(invest)', '(nontemporal)') - if new_key in self._dataset: - warnings.warn( - f"Variable name '{key}' is deprecated. 
Use '{new_key}' instead.", DeprecationWarning, stacklevel=2 - ) - return self._dataset[new_key] - - # Default to original dataset behavior - return self._dataset[key] + elif found: + # Key exists as-is + return self._dataset[key] + else: + # Key not found - let dataset handle the error + return self._dataset[key] def __getattr__(self, name): """Delegate all other attributes to the wrapped dataset.""" @@ -71,15 +143,58 @@ def __getattr__(self, name): def __contains__(self, key): """Check if key exists in dataset (with backwards compatibility).""" - if key in self._dataset: - return True - if key in self.DEPRECATED_VARIABLE_MAPPING: - return self.DEPRECATED_VARIABLE_MAPPING[key] in self._dataset - # Check cross-effect variables - if '->' in key and ('(operation)' in key or '(invest)' in key): - new_key = key.replace('(operation)', '(temporal)').replace('(invest)', '(nontemporal)') - return new_key in self._dataset - return False + new_key, found = self._resolve_deprecated_key(key) + return found + + def __iter__(self): + """Iterate over dataset variables.""" + return iter(self._dataset) + + def __len__(self): + """Get number of variables in dataset.""" + return len(self._dataset) + + def keys(self): + """Get dataset variable names.""" + return self._dataset.keys() + + def values(self): + """Get dataset variable values.""" + return self._dataset.values() + + def items(self): + """Get dataset variable items.""" + return self._dataset.items() + + def get_deprecated_mappings(self): + """Get all currently active deprecated mappings for debugging. + + Returns: + Dict mapping deprecated names to current names for variables in this dataset. + """ + mappings = {} + + # Check all variables in dataset against all deprecation rules + for var_name in self._dataset.data_vars: + # Check if any deprecated patterns would map TO this variable + + # Direct mappings (reverse lookup) + for old_name, new_name in self.DEPRECATED_VARIABLE_MAPPING.items(): + if new_name == var_name and old_name not in self._dataset: + mappings[old_name] = var_name + + # Pattern-based rules (generate potential old names) + for old_pattern, new_pattern, condition_func in self.PATTERN_SUBSTITUTION_RULES: + if new_pattern in var_name: + potential_old_name = var_name.replace(new_pattern, old_pattern) + if condition_func(potential_old_name) and potential_old_name not in self._dataset: + mappings[potential_old_name] = var_name + + # Regex patterns (reverse lookup) + # This is complex for reverse lookup, so we'll skip for now + # Could be added if needed for debugging + + return mappings @property def _raw_dataset(self): From f5fbfc6609649404ebcf5f4d244d604dab8d2d24 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 12:01:18 +0200 Subject: [PATCH 22/27] Add unit tests for backwards compatability --- tests/test_backwards_compatible_dataset.py | 443 +++++++++++++++++++++ 1 file changed, 443 insertions(+) create mode 100644 tests/test_backwards_compatible_dataset.py diff --git a/tests/test_backwards_compatible_dataset.py b/tests/test_backwards_compatible_dataset.py new file mode 100644 index 000000000..a2ae8f70b --- /dev/null +++ b/tests/test_backwards_compatible_dataset.py @@ -0,0 +1,443 @@ +"""Unit tests for BackwardsCompatibleDataset functionality.""" + +import warnings + +import numpy as np +import pandas as pd +import pytest +import xarray as xr + +from flixopt.results import BackwardsCompatibleDataset + + +class TestBackwardsCompatibleDataset: + """Test suite for 
BackwardsCompatibleDataset class.""" + + @pytest.fixture + def sample_dataset(self): + """Create a sample dataset with new variable names for testing.""" + data = { + # Effect variables (new names) + 'costs': xr.DataArray([100.0], dims=['scalar']), + 'emissions': xr.DataArray([50.0], dims=['scalar']), + # Cross-effect variables with new naming + 'Boiler_01(Natural_Gas)->costs(temporal)': xr.DataArray([20.0, 25.0, 30.0], dims=['time']), + 'Boiler_01(Natural_Gas)->costs(nontemporal)': xr.DataArray([500.0], dims=['scalar']), + 'HeatPump_02(Electricity)->emissions(temporal)': xr.DataArray([5.0, 6.0, 7.0], dims=['time']), + 'Storage_01(Electricity)->emissions(nontemporal)': xr.DataArray([10.0], dims=['scalar']), + # Parameter variables with new naming + 'minimum_temporal': xr.DataArray([10.0], dims=['scalar']), + 'maximum_temporal': xr.DataArray([200.0], dims=['scalar']), + 'minimum_nontemporal': xr.DataArray([5.0], dims=['scalar']), + 'maximum_nontemporal': xr.DataArray([500.0], dims=['scalar']), + 'minimum_temporal_per_hour': xr.DataArray([1.0], dims=['scalar']), + 'maximum_temporal_per_hour': xr.DataArray([10.0], dims=['scalar']), + # Regular variables (no renaming needed) + 'flow_rate': xr.DataArray([75.0, 85.0, 95.0], dims=['time']), + 'charge_state': xr.DataArray([40.0, 60.0, 80.0], dims=['time']), + } + + coords = {'time': pd.date_range('2023-01-01', periods=3, freq='h'), 'scalar': ['value']} + + return xr.Dataset(data, coords=coords) + + @pytest.fixture + def bc_dataset(self, sample_dataset): + """Create a BackwardsCompatibleDataset instance.""" + return BackwardsCompatibleDataset(sample_dataset) + + def test_init(self, sample_dataset): + """Test BackwardsCompatibleDataset initialization.""" + bc_dataset = BackwardsCompatibleDataset(sample_dataset) + assert bc_dataset._dataset is sample_dataset + assert bc_dataset._mapping_cache == {} + + @pytest.mark.parametrize( + 'deprecated_name,expected_name', + [ + ('costs|total', 'costs'), + # Add more direct mappings here as they are added to DEPRECATED_VARIABLE_MAPPING + ], + ) + def test_direct_mapping_access(self, bc_dataset, sample_dataset, deprecated_name, expected_name): + """Test accessing variables via direct mapping.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + result = bc_dataset[deprecated_name] + + # Should issue exactly one deprecation warning + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert f"'{deprecated_name}' is deprecated" in str(w[0].message) + assert f"Use '{expected_name}' instead" in str(w[0].message) + + # Should return the correct data + expected = sample_dataset[expected_name] + assert np.array_equal(result.values, expected.values) + assert result.dims == expected.dims + + @pytest.mark.parametrize( + 'deprecated_key,expected_key', + [ + # (operation) -> (temporal) pattern substitutions + ('Boiler_01(Natural_Gas)->costs(operation)', 'Boiler_01(Natural_Gas)->costs(temporal)'), + ('HeatPump_02(Electricity)->emissions(operation)', 'HeatPump_02(Electricity)->emissions(temporal)'), + # (invest) -> (nontemporal) pattern substitutions + ('Boiler_01(Natural_Gas)->costs(invest)', 'Boiler_01(Natural_Gas)->costs(nontemporal)'), + ('Storage_01(Electricity)->emissions(invest)', 'Storage_01(Electricity)->emissions(nontemporal)'), + ], + ) + def test_pattern_substitution_cross_effects(self, bc_dataset, sample_dataset, deprecated_key, expected_key): + """Test pattern-based substitution for cross-effect variables.""" + with warnings.catch_warnings(record=True) 
as w: + warnings.simplefilter('always') + result = bc_dataset[deprecated_key] + + # Check warning + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert f"'{deprecated_key}' is deprecated" in str(w[0].message) + assert f"Use '{expected_key}' instead" in str(w[0].message) + + # Check data correctness + expected = sample_dataset[expected_key] + assert np.array_equal(result.values, expected.values) + + @pytest.mark.parametrize( + 'deprecated_name,expected_name', + [ + ('minimum_operation', 'minimum_temporal'), + ('maximum_operation', 'maximum_temporal'), + ('minimum_invest', 'minimum_nontemporal'), + ('maximum_invest', 'maximum_nontemporal'), + ('minimum_operation_per_hour', 'minimum_temporal_per_hour'), + ('maximum_operation_per_hour', 'maximum_temporal_per_hour'), + ], + ) + def test_regex_patterns(self, bc_dataset, sample_dataset, deprecated_name, expected_name): + """Test regex-based pattern matching for parameter names.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + result = bc_dataset[deprecated_name] + + # Check warning + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert f"'{deprecated_name}' is deprecated" in str(w[0].message) + assert f"Use '{expected_name}' instead" in str(w[0].message) + + # Check data correctness + expected = sample_dataset[expected_name] + assert np.array_equal(result.values, expected.values) + + def test_no_renaming_for_existing_variables(self, bc_dataset, sample_dataset): + """Test that existing variables are accessed without warnings.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + result = bc_dataset['flow_rate'] + + # Should not issue any warnings + assert len(w) == 0 + + # Should return correct data + expected = sample_dataset['flow_rate'] + assert np.array_equal(result.values, expected.values) + + @pytest.mark.parametrize( + 'nonexistent_key,description', + [ + ('nonexistent_variable', 'completely nonexistent variable'), + ('missing_operation', 'regex pattern that would transform but target missing'), + ('missing_invest', 'regex pattern that would transform but target missing'), + ('Component->missing(operation)', 'cross-effect pattern that would transform but target missing'), + ('Component->missing(invest)', 'cross-effect pattern that would transform but target missing'), + ('fake|mapping', 'direct mapping pattern but not in mapping dict'), + ('costs|wrong', 'partial direct mapping but wrong suffix'), + ], + ) + def test_nonexistent_variable_error(self, bc_dataset, nonexistent_key, description): + """Test that accessing nonexistent variables raises appropriate errors.""" + # Just test that KeyError is raised, don't try to match the message for complex keys + if '->' in nonexistent_key or '|' in nonexistent_key: + with pytest.raises(KeyError): + bc_dataset[nonexistent_key] + else: + with pytest.raises(KeyError, match=nonexistent_key): + bc_dataset[nonexistent_key] + + @pytest.mark.parametrize( + 'variable_name,should_exist', + [ + # Existing variables (current names) + ('costs', True), + ('emissions', True), + ('flow_rate', True), + ('charge_state', True), + ('minimum_temporal', True), + ('maximum_nontemporal', True), + # Direct mapping deprecated names + ('costs|total', True), + # Pattern substitution deprecated names + ('Boiler_01(Natural_Gas)->costs(operation)', True), + ('Storage_01(Electricity)->emissions(invest)', True), + ('HeatPump_02(Electricity)->emissions(operation)', True), + # Regex pattern 
deprecated names + ('minimum_operation', True), + ('maximum_invest', True), + ('minimum_operation_per_hour', True), + ('maximum_operation_per_hour', True), + # Non-existent variables + ('nonexistent_var', False), + ('another_missing(operation)', False), + ('fake_operation', False), + ('not_real_invest', False), + ('', False), # Empty string + ], + ) + def test_contains_method(self, bc_dataset, variable_name, should_exist): + """Test __contains__ method with backwards compatibility.""" + if should_exist: + assert variable_name in bc_dataset + else: + assert variable_name not in bc_dataset + + @pytest.mark.parametrize( + 'deprecated_key,expected_key,mapping_type', + [ + ('costs|total', 'costs', 'direct_mapping'), + ( + 'Boiler_01(Natural_Gas)->costs(operation)', + 'Boiler_01(Natural_Gas)->costs(temporal)', + 'pattern_substitution', + ), + ('minimum_operation', 'minimum_temporal', 'regex_pattern'), + ('maximum_invest', 'maximum_nontemporal', 'regex_pattern'), + ], + ) + def test_caching_mechanism(self, bc_dataset, deprecated_key, expected_key, mapping_type): + """Test that the mapping cache works correctly for all mapping types.""" + # Initial cache should be empty for this key + assert deprecated_key not in bc_dataset._mapping_cache + + # First access should populate cache + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + result1 = bc_dataset[deprecated_key] + + # Check that mapping was cached + assert deprecated_key in bc_dataset._mapping_cache + assert bc_dataset._mapping_cache[deprecated_key] == expected_key + + # Second access should use cache (same result, no additional processing) + cache_before = bc_dataset._mapping_cache.copy() + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + result2 = bc_dataset[deprecated_key] + + # Cache should be unchanged and results should be identical + assert bc_dataset._mapping_cache == cache_before + assert np.array_equal(result1.values, result2.values) + + def test_get_deprecated_mappings(self, bc_dataset): + """Test the get_deprecated_mappings debugging method.""" + mappings = bc_dataset.get_deprecated_mappings() + + # Should include direct mappings + assert 'costs|total' in mappings + assert mappings['costs|total'] == 'costs' + + # Check some specific pattern-based mappings that should exist + # These correspond to the actual variables in our sample dataset + expected_mappings = [ + ('Boiler_01(Natural_Gas)->costs(operation)', 'Boiler_01(Natural_Gas)->costs(temporal)'), + ('Boiler_01(Natural_Gas)->costs(invest)', 'Boiler_01(Natural_Gas)->costs(nontemporal)'), + ('HeatPump_02(Electricity)->emissions(operation)', 'HeatPump_02(Electricity)->emissions(temporal)'), + ('Storage_01(Electricity)->emissions(invest)', 'Storage_01(Electricity)->emissions(nontemporal)'), + ] + + for old_name, new_name in expected_mappings: + if new_name in bc_dataset._dataset: # Only check if target exists + assert old_name in mappings, f"Expected mapping '{old_name}' -> '{new_name}' not found" + assert mappings[old_name] == new_name + + # Should be a reasonable number of mappings + assert len(mappings) >= 3 # At least direct + some pattern mappings + + # Debug print to help understand what mappings were found + print(f'\nFound {len(mappings)} deprecated mappings:') + for old, new in mappings.items(): + print(f' {old} -> {new}') + + def test_dataset_method_delegation(self, bc_dataset, sample_dataset): + """Test that dataset methods are properly delegated.""" + # Test len + assert len(bc_dataset) == len(sample_dataset) + + # Test keys + assert 
list(bc_dataset.keys()) == list(sample_dataset.keys()) + + # Test iteration + bc_vars = list(bc_dataset) + dataset_vars = list(sample_dataset) + assert bc_vars == dataset_vars + + # Test values method exists + assert hasattr(bc_dataset, 'values') + + # Test items method exists + assert hasattr(bc_dataset, 'items') + + def test_raw_dataset_property(self, bc_dataset, sample_dataset): + """Test _raw_dataset property returns the original dataset.""" + assert bc_dataset._raw_dataset is sample_dataset + + def test_getattr_delegation(self, bc_dataset, sample_dataset): + """Test that unknown attributes are delegated to the wrapped dataset.""" + # Test accessing dataset attributes + assert bc_dataset.sizes == sample_dataset.sizes + assert bc_dataset.coords.keys() == sample_dataset.coords.keys() + assert list(bc_dataset.data_vars.keys()) == list(sample_dataset.data_vars.keys()) + + @pytest.mark.parametrize( + 'test_key,should_trigger_pattern,description', + [ + # Cases that should NOT trigger cross-effect pattern substitution (no ->) + ('operation_only', False, 'standalone operation word without arrow'), + ('invest_only', False, 'standalone invest word without arrow'), + ('some_operation_name', False, 'operation in middle of name without arrow'), + ('invest_cost', False, 'invest at start of name without arrow'), + # Cases that SHOULD trigger cross-effect pattern substitution (has ->) + ('NonExistent->something(operation)', True, 'has arrow and operation pattern'), + ('Component->effect(invest)', True, 'has arrow and invest pattern'), + ('A->B(operation)', True, 'minimal arrow with operation'), + ('X->Y(invest)', True, 'minimal arrow with invest'), + # Cases that should NOT trigger because missing parentheses + ('Component->operation', False, 'has arrow but no parentheses around operation'), + ('Component->invest', False, 'has arrow but no parentheses around invest'), + ], + ) + def test_pattern_condition_functions(self, bc_dataset, test_key, should_trigger_pattern, description): + """Test that pattern conditions work correctly.""" + # All these test keys should fail with KeyError since they don't exist + # But the important thing is whether they trigger pattern matching attempt + with pytest.raises(KeyError): + bc_dataset[test_key] + + # We can indirectly test if pattern was attempted by checking the cache + # If pattern was attempted but failed, no cache entry should be made + assert test_key not in bc_dataset._mapping_cache + + def test_multiple_pattern_matching(self, bc_dataset, sample_dataset): + """Test behavior when multiple patterns could potentially match.""" + # Create a variable that could match multiple patterns + # This tests the order of pattern application + + # Add a variable that has both cross-effect pattern and could match regex + sample_dataset['test_operation'] = xr.DataArray([123.0], dims=['scalar']) + bc_dataset_new = BackwardsCompatibleDataset(sample_dataset) + + # This should match the regex pattern (*_operation -> *_temporal) + # not the cross-effect pattern (since no -> in key) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + _ = bc_dataset_new['test_operation'] # Should access as-is, no renaming + + # Should not issue warning since 'test_operation' exists as-is + assert len(w) == 0 + + @pytest.mark.parametrize( + 'test_input,expected_exception,description', + [ + # Empty and None inputs + ('', KeyError, 'empty string'), + (None, TypeError, 'None input'), + # Partial pattern matches that shouldn't transform + ('not_a_real_operation_var', 
KeyError, 'operation in name but no real match'), + ('operation_but_no_suffix', KeyError, 'operation word but not as suffix'), + ('prefix_invest_middle', KeyError, 'invest in middle but not as suffix'), + ('something_operation_something', KeyError, 'operation in middle with suffixes'), + # Special characters + ('costs|', KeyError, 'partial pipe character'), + ('|total', KeyError, 'pipe at start'), + ('costs||total', KeyError, 'double pipe'), + ('costs|total|extra', KeyError, 'extra parts after valid pattern'), + # Arrow patterns that don't match + ('->', KeyError, 'just arrow'), + ('->operation', KeyError, 'arrow at start'), + ('operation->', KeyError, 'arrow at end'), + ('->->', KeyError, 'double arrow'), + # Mixed patterns + ('costs|total(operation)', KeyError, 'mixing direct and pattern syntax'), + ('minimum_operation|total', KeyError, 'mixing regex and direct syntax'), + ], + ) + def test_edge_cases(self, bc_dataset, test_input, expected_exception, description): + """Test various edge cases.""" + with pytest.raises(expected_exception): + bc_dataset[test_input] + + def test_performance_with_many_accesses(self, bc_dataset): + """Test that caching provides performance benefits.""" + deprecated_key = 'costs|total' + + # Access the same key many times + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + for _ in range(100): + result = bc_dataset[deprecated_key] + assert result is not None + + # Should have cached the result + assert deprecated_key in bc_dataset._mapping_cache + + def test_regex_pattern_coverage(self): + """Test that regex patterns cover expected cases.""" + patterns = BackwardsCompatibleDataset._get_regex_patterns() + + # Should have patterns for operation, invest, and operation_per_hour + descriptions = [desc for _, _, desc in patterns] + assert any('operation suffix to temporal' in desc for desc in descriptions) + assert any('invest suffix to nontemporal' in desc for desc in descriptions) + assert any('operation_per_hour to temporal_per_hour' in desc for desc in descriptions) + + def test_warning_message_format(self, bc_dataset): + """Test that warning messages have the correct format.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + bc_dataset['costs|total'] + + warning_msg = str(w[0].message) + assert "'costs|total' is deprecated" in warning_msg + assert "Use 'costs' instead" in warning_msg + + @pytest.mark.parametrize( + 'variable_name,value,description', + [ + # Similar names that should NOT trigger renaming + ('operational_cost', 999.0, 'operational (not operation) suffix'), + ('investment_return', 888.0, 'investment (not invest) prefix'), + ('operation_mode', 777.0, 'operation as prefix not suffix'), + ('invest_strategy', 666.0, 'invest as prefix not suffix'), + ('cooperative_effort', 555.0, 'contains operation but different context'), + ('reinvestment_plan', 444.0, 'contains invest but different context'), + ('temporal_sequence', 333.0, 'temporal as prefix (target name)'), + ('nontemporal_data', 222.0, 'nontemporal as prefix (target name)'), + ], + ) + def test_no_false_positives_for_similar_names(self, bc_dataset, sample_dataset, variable_name, value, description): + """Test that similar but different variable names don't trigger false matches.""" + # Add variable with similar name that shouldn't trigger renaming + sample_dataset[variable_name] = xr.DataArray([value], dims=['scalar']) + bc_dataset_new = BackwardsCompatibleDataset(sample_dataset) + + # Should be accessed without any renaming warnings + with 
warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + result = bc_dataset_new[variable_name] + + # Should not trigger deprecation warnings + deprecation_warnings = [warning for warning in w if issubclass(warning.category, DeprecationWarning)] + assert len(deprecation_warnings) == 0, f'Unexpected deprecation warning for {variable_name}: {description}' + + # Should return correct value + assert result.values == value From f6cfd6bea54f4a0887769d481ae4902814465771 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 12:09:51 +0200 Subject: [PATCH 23/27] Remove backwards compatible dataset --- flixopt/results.py | 176 +------- tests/test_backwards_compatible_dataset.py | 443 --------------------- 2 files changed, 1 insertion(+), 618 deletions(-) delete mode 100644 tests/test_backwards_compatible_dataset.py diff --git a/flixopt/results.py b/flixopt/results.py index f355ea275..698217704 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -28,180 +28,6 @@ logger = logging.getLogger('flixopt') -class BackwardsCompatibleDataset: - """Wrapper around xarray.Dataset to provide backwards compatibility for renamed variables. - - This class handles multiple types of backwards compatibility: - 1. Direct variable name mappings (e.g., 'costs|total' -> 'costs') - 2. Pattern-based substitutions (e.g., (operation) -> (temporal)) - 3. Dynamic renaming rules for complex variable naming schemes - """ - - # Direct mapping from old variable names to new variable names - DEPRECATED_VARIABLE_MAPPING = { - # Effect variable names - 'costs|total': 'costs', - } - - # Pattern-based substitution rules (old_pattern, new_pattern, condition_func) - PATTERN_SUBSTITUTION_RULES = [ - # Cross-effect variables: (operation) -> (temporal), (invest) -> (nontemporal) - ('(operation)', '(temporal)', lambda key: '->' in key), - ('(invest)', '(nontemporal)', lambda key: '->' in key), - ] - - # Regex-based renaming patterns for more complex cases - @staticmethod - def _get_regex_patterns(): - """Get regex patterns for complex variable renaming. - - Returns: - List of tuples (pattern, replacement, description) for regex-based renaming. - """ - import re - - return [ - # Pattern for effect parameter names: *_operation -> *_temporal - (re.compile(r'(.+)_operation($|_)'), r'\1_temporal\2', 'operation suffix to temporal'), - # Pattern for effect parameter names: *_invest -> *_nontemporal - (re.compile(r'(.+)_invest($|_)'), r'\1_nontemporal\2', 'invest suffix to nontemporal'), - # Pattern for per-hour variants: *_operation_per_hour -> *_temporal_per_hour - ( - re.compile(r'(.+)_operation_per_hour'), - r'\1_temporal_per_hour', - 'operation_per_hour to temporal_per_hour', - ), - ] - - def __init__(self, dataset: xr.Dataset): - self._dataset = dataset - # Cache for resolved mappings to improve performance - self._mapping_cache = {} - - def _resolve_deprecated_key(self, key): - """Resolve a deprecated variable name to its current equivalent. - - Args: - key: Variable name to resolve - - Returns: - Tuple of (new_key, found) where found indicates if a mapping was found - """ - # Check cache first - if key in self._mapping_cache: - return self._mapping_cache[key], True - - # 1. Direct mapping - if key in self.DEPRECATED_VARIABLE_MAPPING: - new_key = self.DEPRECATED_VARIABLE_MAPPING[key] - if new_key in self._dataset: - self._mapping_cache[key] = new_key - return new_key, True - - # 2. 
Pattern-based substitution - for old_pattern, new_pattern, condition_func in self.PATTERN_SUBSTITUTION_RULES: - if condition_func(key) and old_pattern in key: - new_key = key.replace(old_pattern, new_pattern) - if new_key in self._dataset: - self._mapping_cache[key] = new_key - return new_key, True - - # 3. Regex-based patterns - for pattern, replacement, _description in self._get_regex_patterns(): - if pattern.search(key): - new_key = pattern.sub(replacement, key) - if new_key in self._dataset: - self._mapping_cache[key] = new_key - return new_key, True - - # 4. Check if key exists as-is in dataset - if key in self._dataset: - return key, True - - return key, False - - def __getitem__(self, key): - """Access dataset variables with backwards compatibility.""" - new_key, found = self._resolve_deprecated_key(key) - - if found and new_key != key: - # Issue deprecation warning for renamed variables - warnings.warn( - f"Variable name '{key}' is deprecated. Use '{new_key}' instead.", DeprecationWarning, stacklevel=2 - ) - return self._dataset[new_key] - elif found: - # Key exists as-is - return self._dataset[key] - else: - # Key not found - let dataset handle the error - return self._dataset[key] - - def __getattr__(self, name): - """Delegate all other attributes to the wrapped dataset.""" - return getattr(self._dataset, name) - - def __contains__(self, key): - """Check if key exists in dataset (with backwards compatibility).""" - new_key, found = self._resolve_deprecated_key(key) - return found - - def __iter__(self): - """Iterate over dataset variables.""" - return iter(self._dataset) - - def __len__(self): - """Get number of variables in dataset.""" - return len(self._dataset) - - def keys(self): - """Get dataset variable names.""" - return self._dataset.keys() - - def values(self): - """Get dataset variable values.""" - return self._dataset.values() - - def items(self): - """Get dataset variable items.""" - return self._dataset.items() - - def get_deprecated_mappings(self): - """Get all currently active deprecated mappings for debugging. - - Returns: - Dict mapping deprecated names to current names for variables in this dataset. - """ - mappings = {} - - # Check all variables in dataset against all deprecation rules - for var_name in self._dataset.data_vars: - # Check if any deprecated patterns would map TO this variable - - # Direct mappings (reverse lookup) - for old_name, new_name in self.DEPRECATED_VARIABLE_MAPPING.items(): - if new_name == var_name and old_name not in self._dataset: - mappings[old_name] = var_name - - # Pattern-based rules (generate potential old names) - for old_pattern, new_pattern, condition_func in self.PATTERN_SUBSTITUTION_RULES: - if new_pattern in var_name: - potential_old_name = var_name.replace(new_pattern, old_pattern) - if condition_func(potential_old_name) and potential_old_name not in self._dataset: - mappings[potential_old_name] = var_name - - # Regex patterns (reverse lookup) - # This is complex for reverse lookup, so we'll skip for now - # Could be added if needed for debugging - - return mappings - - @property - def _raw_dataset(self): - """Access to the underlying dataset without backwards compatibility.""" - return self._dataset - - class CalculationResults: """Comprehensive container for optimization calculation results and analysis tools. @@ -355,7 +181,7 @@ def __init__( folder: Results storage folder. model: Linopy optimization model. 
""" - self.solution = BackwardsCompatibleDataset(solution) + self.solution = solution self.flow_system = flow_system self.summary = summary self.name = name diff --git a/tests/test_backwards_compatible_dataset.py b/tests/test_backwards_compatible_dataset.py deleted file mode 100644 index a2ae8f70b..000000000 --- a/tests/test_backwards_compatible_dataset.py +++ /dev/null @@ -1,443 +0,0 @@ -"""Unit tests for BackwardsCompatibleDataset functionality.""" - -import warnings - -import numpy as np -import pandas as pd -import pytest -import xarray as xr - -from flixopt.results import BackwardsCompatibleDataset - - -class TestBackwardsCompatibleDataset: - """Test suite for BackwardsCompatibleDataset class.""" - - @pytest.fixture - def sample_dataset(self): - """Create a sample dataset with new variable names for testing.""" - data = { - # Effect variables (new names) - 'costs': xr.DataArray([100.0], dims=['scalar']), - 'emissions': xr.DataArray([50.0], dims=['scalar']), - # Cross-effect variables with new naming - 'Boiler_01(Natural_Gas)->costs(temporal)': xr.DataArray([20.0, 25.0, 30.0], dims=['time']), - 'Boiler_01(Natural_Gas)->costs(nontemporal)': xr.DataArray([500.0], dims=['scalar']), - 'HeatPump_02(Electricity)->emissions(temporal)': xr.DataArray([5.0, 6.0, 7.0], dims=['time']), - 'Storage_01(Electricity)->emissions(nontemporal)': xr.DataArray([10.0], dims=['scalar']), - # Parameter variables with new naming - 'minimum_temporal': xr.DataArray([10.0], dims=['scalar']), - 'maximum_temporal': xr.DataArray([200.0], dims=['scalar']), - 'minimum_nontemporal': xr.DataArray([5.0], dims=['scalar']), - 'maximum_nontemporal': xr.DataArray([500.0], dims=['scalar']), - 'minimum_temporal_per_hour': xr.DataArray([1.0], dims=['scalar']), - 'maximum_temporal_per_hour': xr.DataArray([10.0], dims=['scalar']), - # Regular variables (no renaming needed) - 'flow_rate': xr.DataArray([75.0, 85.0, 95.0], dims=['time']), - 'charge_state': xr.DataArray([40.0, 60.0, 80.0], dims=['time']), - } - - coords = {'time': pd.date_range('2023-01-01', periods=3, freq='h'), 'scalar': ['value']} - - return xr.Dataset(data, coords=coords) - - @pytest.fixture - def bc_dataset(self, sample_dataset): - """Create a BackwardsCompatibleDataset instance.""" - return BackwardsCompatibleDataset(sample_dataset) - - def test_init(self, sample_dataset): - """Test BackwardsCompatibleDataset initialization.""" - bc_dataset = BackwardsCompatibleDataset(sample_dataset) - assert bc_dataset._dataset is sample_dataset - assert bc_dataset._mapping_cache == {} - - @pytest.mark.parametrize( - 'deprecated_name,expected_name', - [ - ('costs|total', 'costs'), - # Add more direct mappings here as they are added to DEPRECATED_VARIABLE_MAPPING - ], - ) - def test_direct_mapping_access(self, bc_dataset, sample_dataset, deprecated_name, expected_name): - """Test accessing variables via direct mapping.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - result = bc_dataset[deprecated_name] - - # Should issue exactly one deprecation warning - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - assert f"'{deprecated_name}' is deprecated" in str(w[0].message) - assert f"Use '{expected_name}' instead" in str(w[0].message) - - # Should return the correct data - expected = sample_dataset[expected_name] - assert np.array_equal(result.values, expected.values) - assert result.dims == expected.dims - - @pytest.mark.parametrize( - 'deprecated_key,expected_key', - [ - # (operation) -> (temporal) pattern 
substitutions - ('Boiler_01(Natural_Gas)->costs(operation)', 'Boiler_01(Natural_Gas)->costs(temporal)'), - ('HeatPump_02(Electricity)->emissions(operation)', 'HeatPump_02(Electricity)->emissions(temporal)'), - # (invest) -> (nontemporal) pattern substitutions - ('Boiler_01(Natural_Gas)->costs(invest)', 'Boiler_01(Natural_Gas)->costs(nontemporal)'), - ('Storage_01(Electricity)->emissions(invest)', 'Storage_01(Electricity)->emissions(nontemporal)'), - ], - ) - def test_pattern_substitution_cross_effects(self, bc_dataset, sample_dataset, deprecated_key, expected_key): - """Test pattern-based substitution for cross-effect variables.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - result = bc_dataset[deprecated_key] - - # Check warning - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - assert f"'{deprecated_key}' is deprecated" in str(w[0].message) - assert f"Use '{expected_key}' instead" in str(w[0].message) - - # Check data correctness - expected = sample_dataset[expected_key] - assert np.array_equal(result.values, expected.values) - - @pytest.mark.parametrize( - 'deprecated_name,expected_name', - [ - ('minimum_operation', 'minimum_temporal'), - ('maximum_operation', 'maximum_temporal'), - ('minimum_invest', 'minimum_nontemporal'), - ('maximum_invest', 'maximum_nontemporal'), - ('minimum_operation_per_hour', 'minimum_temporal_per_hour'), - ('maximum_operation_per_hour', 'maximum_temporal_per_hour'), - ], - ) - def test_regex_patterns(self, bc_dataset, sample_dataset, deprecated_name, expected_name): - """Test regex-based pattern matching for parameter names.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - result = bc_dataset[deprecated_name] - - # Check warning - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - assert f"'{deprecated_name}' is deprecated" in str(w[0].message) - assert f"Use '{expected_name}' instead" in str(w[0].message) - - # Check data correctness - expected = sample_dataset[expected_name] - assert np.array_equal(result.values, expected.values) - - def test_no_renaming_for_existing_variables(self, bc_dataset, sample_dataset): - """Test that existing variables are accessed without warnings.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - result = bc_dataset['flow_rate'] - - # Should not issue any warnings - assert len(w) == 0 - - # Should return correct data - expected = sample_dataset['flow_rate'] - assert np.array_equal(result.values, expected.values) - - @pytest.mark.parametrize( - 'nonexistent_key,description', - [ - ('nonexistent_variable', 'completely nonexistent variable'), - ('missing_operation', 'regex pattern that would transform but target missing'), - ('missing_invest', 'regex pattern that would transform but target missing'), - ('Component->missing(operation)', 'cross-effect pattern that would transform but target missing'), - ('Component->missing(invest)', 'cross-effect pattern that would transform but target missing'), - ('fake|mapping', 'direct mapping pattern but not in mapping dict'), - ('costs|wrong', 'partial direct mapping but wrong suffix'), - ], - ) - def test_nonexistent_variable_error(self, bc_dataset, nonexistent_key, description): - """Test that accessing nonexistent variables raises appropriate errors.""" - # Just test that KeyError is raised, don't try to match the message for complex keys - if '->' in nonexistent_key or '|' in nonexistent_key: - with 
pytest.raises(KeyError): - bc_dataset[nonexistent_key] - else: - with pytest.raises(KeyError, match=nonexistent_key): - bc_dataset[nonexistent_key] - - @pytest.mark.parametrize( - 'variable_name,should_exist', - [ - # Existing variables (current names) - ('costs', True), - ('emissions', True), - ('flow_rate', True), - ('charge_state', True), - ('minimum_temporal', True), - ('maximum_nontemporal', True), - # Direct mapping deprecated names - ('costs|total', True), - # Pattern substitution deprecated names - ('Boiler_01(Natural_Gas)->costs(operation)', True), - ('Storage_01(Electricity)->emissions(invest)', True), - ('HeatPump_02(Electricity)->emissions(operation)', True), - # Regex pattern deprecated names - ('minimum_operation', True), - ('maximum_invest', True), - ('minimum_operation_per_hour', True), - ('maximum_operation_per_hour', True), - # Non-existent variables - ('nonexistent_var', False), - ('another_missing(operation)', False), - ('fake_operation', False), - ('not_real_invest', False), - ('', False), # Empty string - ], - ) - def test_contains_method(self, bc_dataset, variable_name, should_exist): - """Test __contains__ method with backwards compatibility.""" - if should_exist: - assert variable_name in bc_dataset - else: - assert variable_name not in bc_dataset - - @pytest.mark.parametrize( - 'deprecated_key,expected_key,mapping_type', - [ - ('costs|total', 'costs', 'direct_mapping'), - ( - 'Boiler_01(Natural_Gas)->costs(operation)', - 'Boiler_01(Natural_Gas)->costs(temporal)', - 'pattern_substitution', - ), - ('minimum_operation', 'minimum_temporal', 'regex_pattern'), - ('maximum_invest', 'maximum_nontemporal', 'regex_pattern'), - ], - ) - def test_caching_mechanism(self, bc_dataset, deprecated_key, expected_key, mapping_type): - """Test that the mapping cache works correctly for all mapping types.""" - # Initial cache should be empty for this key - assert deprecated_key not in bc_dataset._mapping_cache - - # First access should populate cache - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - result1 = bc_dataset[deprecated_key] - - # Check that mapping was cached - assert deprecated_key in bc_dataset._mapping_cache - assert bc_dataset._mapping_cache[deprecated_key] == expected_key - - # Second access should use cache (same result, no additional processing) - cache_before = bc_dataset._mapping_cache.copy() - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - result2 = bc_dataset[deprecated_key] - - # Cache should be unchanged and results should be identical - assert bc_dataset._mapping_cache == cache_before - assert np.array_equal(result1.values, result2.values) - - def test_get_deprecated_mappings(self, bc_dataset): - """Test the get_deprecated_mappings debugging method.""" - mappings = bc_dataset.get_deprecated_mappings() - - # Should include direct mappings - assert 'costs|total' in mappings - assert mappings['costs|total'] == 'costs' - - # Check some specific pattern-based mappings that should exist - # These correspond to the actual variables in our sample dataset - expected_mappings = [ - ('Boiler_01(Natural_Gas)->costs(operation)', 'Boiler_01(Natural_Gas)->costs(temporal)'), - ('Boiler_01(Natural_Gas)->costs(invest)', 'Boiler_01(Natural_Gas)->costs(nontemporal)'), - ('HeatPump_02(Electricity)->emissions(operation)', 'HeatPump_02(Electricity)->emissions(temporal)'), - ('Storage_01(Electricity)->emissions(invest)', 'Storage_01(Electricity)->emissions(nontemporal)'), - ] - - for old_name, new_name in expected_mappings: - if new_name in 
bc_dataset._dataset: # Only check if target exists - assert old_name in mappings, f"Expected mapping '{old_name}' -> '{new_name}' not found" - assert mappings[old_name] == new_name - - # Should be a reasonable number of mappings - assert len(mappings) >= 3 # At least direct + some pattern mappings - - # Debug print to help understand what mappings were found - print(f'\nFound {len(mappings)} deprecated mappings:') - for old, new in mappings.items(): - print(f' {old} -> {new}') - - def test_dataset_method_delegation(self, bc_dataset, sample_dataset): - """Test that dataset methods are properly delegated.""" - # Test len - assert len(bc_dataset) == len(sample_dataset) - - # Test keys - assert list(bc_dataset.keys()) == list(sample_dataset.keys()) - - # Test iteration - bc_vars = list(bc_dataset) - dataset_vars = list(sample_dataset) - assert bc_vars == dataset_vars - - # Test values method exists - assert hasattr(bc_dataset, 'values') - - # Test items method exists - assert hasattr(bc_dataset, 'items') - - def test_raw_dataset_property(self, bc_dataset, sample_dataset): - """Test _raw_dataset property returns the original dataset.""" - assert bc_dataset._raw_dataset is sample_dataset - - def test_getattr_delegation(self, bc_dataset, sample_dataset): - """Test that unknown attributes are delegated to the wrapped dataset.""" - # Test accessing dataset attributes - assert bc_dataset.sizes == sample_dataset.sizes - assert bc_dataset.coords.keys() == sample_dataset.coords.keys() - assert list(bc_dataset.data_vars.keys()) == list(sample_dataset.data_vars.keys()) - - @pytest.mark.parametrize( - 'test_key,should_trigger_pattern,description', - [ - # Cases that should NOT trigger cross-effect pattern substitution (no ->) - ('operation_only', False, 'standalone operation word without arrow'), - ('invest_only', False, 'standalone invest word without arrow'), - ('some_operation_name', False, 'operation in middle of name without arrow'), - ('invest_cost', False, 'invest at start of name without arrow'), - # Cases that SHOULD trigger cross-effect pattern substitution (has ->) - ('NonExistent->something(operation)', True, 'has arrow and operation pattern'), - ('Component->effect(invest)', True, 'has arrow and invest pattern'), - ('A->B(operation)', True, 'minimal arrow with operation'), - ('X->Y(invest)', True, 'minimal arrow with invest'), - # Cases that should NOT trigger because missing parentheses - ('Component->operation', False, 'has arrow but no parentheses around operation'), - ('Component->invest', False, 'has arrow but no parentheses around invest'), - ], - ) - def test_pattern_condition_functions(self, bc_dataset, test_key, should_trigger_pattern, description): - """Test that pattern conditions work correctly.""" - # All these test keys should fail with KeyError since they don't exist - # But the important thing is whether they trigger pattern matching attempt - with pytest.raises(KeyError): - bc_dataset[test_key] - - # We can indirectly test if pattern was attempted by checking the cache - # If pattern was attempted but failed, no cache entry should be made - assert test_key not in bc_dataset._mapping_cache - - def test_multiple_pattern_matching(self, bc_dataset, sample_dataset): - """Test behavior when multiple patterns could potentially match.""" - # Create a variable that could match multiple patterns - # This tests the order of pattern application - - # Add a variable that has both cross-effect pattern and could match regex - sample_dataset['test_operation'] = xr.DataArray([123.0], 
dims=['scalar']) - bc_dataset_new = BackwardsCompatibleDataset(sample_dataset) - - # This should match the regex pattern (*_operation -> *_temporal) - # not the cross-effect pattern (since no -> in key) - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - _ = bc_dataset_new['test_operation'] # Should access as-is, no renaming - - # Should not issue warning since 'test_operation' exists as-is - assert len(w) == 0 - - @pytest.mark.parametrize( - 'test_input,expected_exception,description', - [ - # Empty and None inputs - ('', KeyError, 'empty string'), - (None, TypeError, 'None input'), - # Partial pattern matches that shouldn't transform - ('not_a_real_operation_var', KeyError, 'operation in name but no real match'), - ('operation_but_no_suffix', KeyError, 'operation word but not as suffix'), - ('prefix_invest_middle', KeyError, 'invest in middle but not as suffix'), - ('something_operation_something', KeyError, 'operation in middle with suffixes'), - # Special characters - ('costs|', KeyError, 'partial pipe character'), - ('|total', KeyError, 'pipe at start'), - ('costs||total', KeyError, 'double pipe'), - ('costs|total|extra', KeyError, 'extra parts after valid pattern'), - # Arrow patterns that don't match - ('->', KeyError, 'just arrow'), - ('->operation', KeyError, 'arrow at start'), - ('operation->', KeyError, 'arrow at end'), - ('->->', KeyError, 'double arrow'), - # Mixed patterns - ('costs|total(operation)', KeyError, 'mixing direct and pattern syntax'), - ('minimum_operation|total', KeyError, 'mixing regex and direct syntax'), - ], - ) - def test_edge_cases(self, bc_dataset, test_input, expected_exception, description): - """Test various edge cases.""" - with pytest.raises(expected_exception): - bc_dataset[test_input] - - def test_performance_with_many_accesses(self, bc_dataset): - """Test that caching provides performance benefits.""" - deprecated_key = 'costs|total' - - # Access the same key many times - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - for _ in range(100): - result = bc_dataset[deprecated_key] - assert result is not None - - # Should have cached the result - assert deprecated_key in bc_dataset._mapping_cache - - def test_regex_pattern_coverage(self): - """Test that regex patterns cover expected cases.""" - patterns = BackwardsCompatibleDataset._get_regex_patterns() - - # Should have patterns for operation, invest, and operation_per_hour - descriptions = [desc for _, _, desc in patterns] - assert any('operation suffix to temporal' in desc for desc in descriptions) - assert any('invest suffix to nontemporal' in desc for desc in descriptions) - assert any('operation_per_hour to temporal_per_hour' in desc for desc in descriptions) - - def test_warning_message_format(self, bc_dataset): - """Test that warning messages have the correct format.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - bc_dataset['costs|total'] - - warning_msg = str(w[0].message) - assert "'costs|total' is deprecated" in warning_msg - assert "Use 'costs' instead" in warning_msg - - @pytest.mark.parametrize( - 'variable_name,value,description', - [ - # Similar names that should NOT trigger renaming - ('operational_cost', 999.0, 'operational (not operation) suffix'), - ('investment_return', 888.0, 'investment (not invest) prefix'), - ('operation_mode', 777.0, 'operation as prefix not suffix'), - ('invest_strategy', 666.0, 'invest as prefix not suffix'), - ('cooperative_effort', 555.0, 'contains operation but 
different context'), - ('reinvestment_plan', 444.0, 'contains invest but different context'), - ('temporal_sequence', 333.0, 'temporal as prefix (target name)'), - ('nontemporal_data', 222.0, 'nontemporal as prefix (target name)'), - ], - ) - def test_no_false_positives_for_similar_names(self, bc_dataset, sample_dataset, variable_name, value, description): - """Test that similar but different variable names don't trigger false matches.""" - # Add variable with similar name that shouldn't trigger renaming - sample_dataset[variable_name] = xr.DataArray([value], dims=['scalar']) - bc_dataset_new = BackwardsCompatibleDataset(sample_dataset) - - # Should be accessed without any renaming warnings - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - result = bc_dataset_new[variable_name] - - # Should not trigger deprecation warnings - deprecation_warnings = [warning for warning in w if issubclass(warning.category, DeprecationWarning)] - assert len(deprecation_warnings) == 0, f'Unexpected deprecation warning for {variable_name}: {description}' - - # Should return correct value - assert result.values == value From b0716716868970842dd3d13d7ab96425ed5be3a5 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 27 Sep 2025 12:13:45 +0200 Subject: [PATCH 24/27] Renamed maximum_temporal_per_hour to maximum_per_hour and minimum_temporal_per_hour to minimum_per_hour --- flixopt/effects.py | 82 ++++++++++++++++++++++---------------------- tests/conftest.py | 2 +- tests/test_effect.py | 4 +-- 3 files changed, 44 insertions(+), 44 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index 3e404ec65..918938be6 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -54,8 +54,8 @@ class Effect(Element): Maps this effect's investment values to contributions to other effects. minimum_temporal: Minimum allowed total contribution across all timesteps. maximum_temporal: Maximum allowed total contribution across all timesteps. - minimum_temporal_per_hour: Minimum allowed contribution per hour. - maximum_temporal_per_hour: Maximum allowed contribution per hour. + minimum_per_hour: Minimum allowed contribution per hour. + maximum_per_hour: Maximum allowed contribution per hour. minimum_nontemporal: Minimum allowed total nontemporal contribution. maximum_nontemporal: Maximum allowed total nontemporal contribution. minimum_total: Minimum allowed total effect (temporal + nontemporal combined). @@ -68,8 +68,8 @@ class Effect(Element): maximum_operation: Use `maximum_temporal` instead. minimum_invest: Use `minimum_nontemporal` instead. maximum_invest: Use `maximum_nontemporal` instead. - minimum_operation_per_hour: Use `minimum_temporal_per_hour` instead. - maximum_operation_per_hour: Use `maximum_temporal_per_hour` instead. + minimum_operation_per_hour: Use `minimum_per_hour` instead. + maximum_operation_per_hour: Use `maximum_per_hour` instead. 
 
     Examples:
         Basic cost objective:
@@ -119,8 +119,8 @@ class Effect(Element):
             label='water_consumption',
             unit='m³',
             description='Industrial water usage',
-            minimum_temporal_per_hour=10,  # Minimum 10 m³/h for process stability
-            maximum_temporal_per_hour=500,  # Maximum 500 m³/h capacity limit
+            minimum_per_hour=10,  # Minimum 10 m³/h for process stability
+            maximum_per_hour=500,  # Maximum 500 m³/h capacity limit
             maximum_total=100_000,  # Annual permit limit: 100,000 m³
         )
         ```
@@ -154,8 +154,8 @@ def __init__(
         maximum_temporal: Scalar | None = None,
         minimum_nontemporal: Scalar | None = None,
         maximum_nontemporal: Scalar | None = None,
-        minimum_temporal_per_hour: NumericDataTS | None = None,
-        maximum_temporal_per_hour: NumericDataTS | None = None,
+        minimum_per_hour: NumericDataTS | None = None,
+        maximum_per_hour: NumericDataTS | None = None,
         minimum_total: Scalar | None = None,
         maximum_total: Scalar | None = None,
         **kwargs,
@@ -226,31 +226,31 @@ def __init__(
                 raise ValueError('Either maximum_invest or maximum_nontemporal can be specified, but not both.')
             maximum_nontemporal = maximum_invest
 
-        # Handle minimum_temporal_per_hour
+        # Handle minimum_per_hour
         if minimum_operation_per_hour is not None:
             warnings.warn(
-                "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.",
+                "Parameter 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.",
                 DeprecationWarning,
                 stacklevel=2,
             )
-            if minimum_temporal_per_hour is not None:
+            if minimum_per_hour is not None:
                 raise ValueError(
-                    'Either minimum_operation_per_hour or minimum_temporal_per_hour can be specified, but not both.'
+                    'Either minimum_operation_per_hour or minimum_per_hour can be specified, but not both.'
                 )
-            minimum_temporal_per_hour = minimum_operation_per_hour
+            minimum_per_hour = minimum_operation_per_hour
 
-        # Handle maximum_temporal_per_hour
+        # Handle maximum_per_hour
         if maximum_operation_per_hour is not None:
             warnings.warn(
-                "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.",
+                "Parameter 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.",
                 DeprecationWarning,
                 stacklevel=2,
             )
-            if maximum_temporal_per_hour is not None:
+            if maximum_per_hour is not None:
                 raise ValueError(
-                    'Either maximum_operation_per_hour or maximum_temporal_per_hour can be specified, but not both.'
+                    'Either maximum_operation_per_hour or maximum_per_hour can be specified, but not both.'
                 )
-            maximum_temporal_per_hour = maximum_operation_per_hour
+            maximum_per_hour = maximum_operation_per_hour
 
         # Validate any remaining unexpected kwargs
         self._validate_kwargs(kwargs)
@@ -260,8 +260,8 @@ def __init__(
         self.maximum_temporal = maximum_temporal
         self.minimum_nontemporal = minimum_nontemporal
         self.maximum_nontemporal = maximum_nontemporal
-        self.minimum_temporal_per_hour = minimum_temporal_per_hour
-        self.maximum_temporal_per_hour = maximum_temporal_per_hour
+        self.minimum_per_hour = minimum_per_hour
+        self.maximum_per_hour = maximum_per_hour
         self.minimum_total = minimum_total
         self.maximum_total = maximum_total
 
@@ -364,59 +364,59 @@ def maximum_invest(self, value):
 
     @property
     def minimum_operation_per_hour(self):
-        """DEPRECATED: Use 'minimum_temporal_per_hour' property instead."""
+        """DEPRECATED: Use 'minimum_per_hour' property instead."""
         import warnings
 
         warnings.warn(
-            "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.",
+            "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.",
            DeprecationWarning,
             stacklevel=2,
         )
-        return self.minimum_temporal_per_hour
+        return self.minimum_per_hour
 
     @minimum_operation_per_hour.setter
     def minimum_operation_per_hour(self, value):
-        """DEPRECATED: Use 'minimum_temporal_per_hour' property instead."""
+        """DEPRECATED: Use 'minimum_per_hour' property instead."""
         import warnings
 
         warnings.warn(
-            "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_temporal_per_hour' instead.",
+            "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.",
             DeprecationWarning,
             stacklevel=2,
         )
-        self.minimum_temporal_per_hour = value
+        self.minimum_per_hour = value
 
     @property
     def maximum_operation_per_hour(self):
-        """DEPRECATED: Use 'maximum_temporal_per_hour' property instead."""
+        """DEPRECATED: Use 'maximum_per_hour' property instead."""
         import warnings
 
         warnings.warn(
-            "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.",
+            "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.",
             DeprecationWarning,
             stacklevel=2,
         )
-        return self.maximum_temporal_per_hour
+        return self.maximum_per_hour
 
     @maximum_operation_per_hour.setter
     def maximum_operation_per_hour(self, value):
-        """DEPRECATED: Use 'maximum_temporal_per_hour' property instead."""
+        """DEPRECATED: Use 'maximum_per_hour' property instead."""
         import warnings
 
         warnings.warn(
-            "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_temporal_per_hour' instead.",
+            "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.",
             DeprecationWarning,
             stacklevel=2,
         )
-        self.maximum_temporal_per_hour = value
+        self.maximum_per_hour = value
 
     def transform_data(self, flow_system: FlowSystem):
-        self.minimum_temporal_per_hour = flow_system.create_time_series(
-            f'{self.label_full}|minimum_temporal_per_hour', self.minimum_temporal_per_hour
+        self.minimum_per_hour = flow_system.create_time_series(
+            f'{self.label_full}|minimum_per_hour', self.minimum_per_hour
         )
-        self.maximum_temporal_per_hour = flow_system.create_time_series(
-            f'{self.label_full}|maximum_temporal_per_hour',
-            self.maximum_temporal_per_hour,
+        self.maximum_per_hour = flow_system.create_time_series(
+            f'{self.label_full}|maximum_per_hour',
+            self.maximum_per_hour,
         )
 
         self.specific_share_to_other_effects_operation = flow_system.create_effect_time_series(
@@ -459,11 +459,11 @@ def __init__(self, model: SystemModel, element: Effect):
                 label_full=f'{self.label_full}(temporal)',
                 total_max=self.element.maximum_temporal,
                 total_min=self.element.minimum_temporal,
-                min_per_hour=self.element.minimum_temporal_per_hour.active_data
-                if self.element.minimum_temporal_per_hour is not None
+                min_per_hour=self.element.minimum_per_hour.active_data
+                if self.element.minimum_per_hour is not None
                 else None,
-                max_per_hour=self.element.maximum_temporal_per_hour.active_data
-                if self.element.maximum_temporal_per_hour is not None
+                max_per_hour=self.element.maximum_per_hour.active_data
+                if self.element.maximum_per_hour is not None
                 else None,
             )
         )
diff --git a/tests/conftest.py b/tests/conftest.py
index 124bd4cd5..a0b196e4d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -62,7 +62,7 @@ def simple_flow_system() -> fx.FlowSystem:
         'kg',
         'CO2_e-Emissionen',
         specific_share_to_other_effects_operation={costs.label: 0.2},
-        maximum_temporal_per_hour=1000,
+        maximum_per_hour=1000,
     )
 
     # Create components
diff --git a/tests/test_effect.py b/tests/test_effect.py
index 33125a0a0..b5fec64dd 100644
--- a/tests/test_effect.py
+++ b/tests/test_effect.py
@@ -60,8 +60,8 @@ def test_bounds(self, basic_flow_system_linopy):
             maximum_nontemporal=2.1,
             minimum_total=3.0,
             maximum_total=3.1,
-            minimum_temporal_per_hour=4.0,
-            maximum_temporal_per_hour=4.1,
+            minimum_per_hour=4.0,
+            maximum_per_hour=4.1,
         )
         flow_system.add_elements(effect)
 

From 02b8859b5a4c4be0c9d76d60021a130b43e46287 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Sat, 27 Sep 2025 12:14:30 +0200
Subject: [PATCH 25/27] Add entries to CHANGELOG.md

---
 CHANGELOG.md | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 46ab6270c..8c2520589 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -48,6 +48,13 @@ Please keep the format of the changelog consistent with the other releases, so t
 ### ♻️ Changed
 
 ### 🗑️ Deprecated
+- Renamed `Effect` parameters:
+  - `minimum_invest` → `minimum_nontemporal`
+  - `maximum_invest` → `maximum_nontemporal`
+  - `minimum_operation` → `minimum_temporal`
+  - `maximum_operation` → `maximum_temporal`
+  - `minimum_operation_per_hour` → `minimum_per_hour`
+  - `maximum_operation_per_hour` → `maximum_per_hour`
 
 ### 🔥 Removed
 
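For downstream users the rename is mechanical. A minimal before/after sketch, assuming the constructor is reachable as `fx.Effect` (as the `fx.`-prefixed fixtures above suggest); the label, unit, description and numeric bounds are purely illustrative:

```python
import flixopt as fx

# Deprecated spelling: still accepted, but remapped with a DeprecationWarning.
co2_old = fx.Effect(
    label='CO2',
    unit='kg',
    description='CO2 emissions',
    maximum_operation_per_hour=1000,  # -> maximum_per_hour
    maximum_invest=5_000,             # -> maximum_nontemporal
)

# New spelling introduced by this patch series.
co2_new = fx.Effect(
    label='CO2',
    unit='kg',
    description='CO2 emissions',
    maximum_per_hour=1000,      # hourly bound on the temporal (operation) share
    maximum_nontemporal=5_000,  # bound on the nontemporal (investment) share
)
```

Mixing an old name with its new counterpart in the same call raises a `ValueError`, as the handling code above shows.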
From 59b5f22368d93e54fe2fc91d598ef2f457726c12 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Sat, 27 Sep 2025 12:31:15 +0200
Subject: [PATCH 26/27] Remove backwards compatible dataset

---
 flixopt/results.py | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/flixopt/results.py b/flixopt/results.py
index 698217704..5a76e3e0a 100644
--- a/flixopt/results.py
+++ b/flixopt/results.py
@@ -4,7 +4,6 @@
 import json
 import logging
 import pathlib
-import warnings
 from typing import TYPE_CHECKING, Literal
 
 import linopy
@@ -188,18 +187,13 @@ def __init__(
         self.model = model
         self.folder = pathlib.Path(folder) if folder is not None else pathlib.Path.cwd() / 'results'
 
         self.components = {
-            label: ComponentResults.from_json(self, infos)
-            for label, infos in self.solution._raw_dataset.attrs['Components'].items()
+            label: ComponentResults.from_json(self, infos) for label, infos in self.solution.attrs['Components'].items()
         }
-        self.buses = {
-            label: BusResults.from_json(self, infos)
-            for label, infos in self.solution._raw_dataset.attrs['Buses'].items()
-        }
+        self.buses = {label: BusResults.from_json(self, infos) for label, infos in self.solution.attrs['Buses'].items()}
         self.effects = {
-            label: EffectResults.from_json(self, infos)
-            for label, infos in self.solution._raw_dataset.attrs['Effects'].items()
+            label: EffectResults.from_json(self, infos) for label, infos in self.solution.attrs['Effects'].items()
         }
 
         self.timesteps_extra = self.solution.indexes['time']
 
@@ -252,7 +246,7 @@ def filter_solution(
         """
         if element is not None:
             return filter_dataset(self[element].solution, variable_dims)
-        return filter_dataset(self.solution._raw_dataset, variable_dims)
+        return filter_dataset(self.solution, variable_dims)
 
     def plot_heatmap(
         self,
@@ -334,7 +328,7 @@ def to_file(
 
         paths = fx_io.CalculationResultsPaths(folder, name)
 
-        fx_io.save_dataset_to_netcdf(self.solution._raw_dataset, paths.solution, compression=compression)
+        fx_io.save_dataset_to_netcdf(self.solution, paths.solution, compression=compression)
         fx_io.save_dataset_to_netcdf(self.flow_system, paths.flow_system, compression=compression)
 
         with open(paths.summary, 'w', encoding='utf-8') as f:
@@ -368,7 +362,7 @@ def __init__(
         self._variable_names = variables
         self._constraint_names = constraints
 
-        self.solution = self._calculation_results.solution._raw_dataset[self._variable_names]
+        self.solution = self._calculation_results.solution[self._variable_names]
 
     @property
     def variables(self) -> linopy.Variables:
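With the wrapper gone, `CalculationResults.solution` is a plain `xarray.Dataset`, so deprecated variable names are no longer remapped on lookup. A small sketch of the direct access this implies; `results` and the variable label are placeholders for whatever your flow system defines:

```python
import xarray as xr

def total_of(results, label: str) -> float:
    """Sum a solution variable over time via plain xarray indexing.

    `results` is assumed to be a flixopt CalculationResults instance; `label`
    must be the current (renamed) variable name, e.g. an effect's
    '...(temporal)|per_timestep' variable.
    """
    da: xr.DataArray = results.solution[label]  # ordinary Dataset __getitem__
    return float(da.sum().values)
```
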
From 06f20e6fe511434a206c32b47d306282f4a00891 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Sat, 27 Sep 2025 12:34:08 +0200
Subject: [PATCH 27/27] Remove unused imports

---
 flixopt/effects.py | 26 --------------------------
 1 file changed, 26 deletions(-)

diff --git a/flixopt/effects.py b/flixopt/effects.py
index 918938be6..ce6145401 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -172,8 +172,6 @@ def __init__(
         self.specific_share_to_other_effects_invest: EffectValuesUser = specific_share_to_other_effects_invest or {}
 
         # Handle backwards compatibility for deprecated parameters
-        import warnings
-
         # Extract deprecated parameters from kwargs
         minimum_operation = kwargs.pop('minimum_operation', None)
         maximum_operation = kwargs.pop('maximum_operation', None)
@@ -269,8 +267,6 @@ def __init__(
     @property
     def minimum_operation(self):
         """DEPRECATED: Use 'minimum_temporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.",
             DeprecationWarning,
@@ -281,8 +277,6 @@ def minimum_operation(self):
     @minimum_operation.setter
     def minimum_operation(self, value):
         """DEPRECATED: Use 'minimum_temporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.",
             DeprecationWarning,
@@ -293,8 +287,6 @@ def minimum_operation(self, value):
     @property
     def maximum_operation(self):
         """DEPRECATED: Use 'maximum_temporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.",
             DeprecationWarning,
@@ -305,8 +297,6 @@ def maximum_operation(self):
     @maximum_operation.setter
     def maximum_operation(self, value):
         """DEPRECATED: Use 'maximum_temporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.",
             DeprecationWarning,
@@ -317,8 +307,6 @@ def maximum_operation(self, value):
     @property
     def minimum_invest(self):
         """DEPRECATED: Use 'minimum_nontemporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.",
             DeprecationWarning,
@@ -329,8 +317,6 @@ def minimum_invest(self):
     @minimum_invest.setter
     def minimum_invest(self, value):
         """DEPRECATED: Use 'minimum_nontemporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_invest' is deprecated. Use 'minimum_nontemporal' instead.",
             DeprecationWarning,
@@ -341,8 +327,6 @@ def minimum_invest(self, value):
     @property
     def maximum_invest(self):
         """DEPRECATED: Use 'maximum_nontemporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.",
             DeprecationWarning,
@@ -353,8 +337,6 @@ def maximum_invest(self):
    @maximum_invest.setter
     def maximum_invest(self, value):
         """DEPRECATED: Use 'maximum_nontemporal' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_invest' is deprecated. Use 'maximum_nontemporal' instead.",
             DeprecationWarning,
@@ -365,8 +347,6 @@ def maximum_invest(self, value):
     @property
     def minimum_operation_per_hour(self):
         """DEPRECATED: Use 'minimum_per_hour' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.",
             DeprecationWarning,
@@ -377,8 +357,6 @@ def minimum_operation_per_hour(self):
     @minimum_operation_per_hour.setter
     def minimum_operation_per_hour(self, value):
         """DEPRECATED: Use 'minimum_per_hour' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.",
             DeprecationWarning,
@@ -389,8 +367,6 @@ def minimum_operation_per_hour(self, value):
     @property
     def maximum_operation_per_hour(self):
         """DEPRECATED: Use 'maximum_per_hour' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.",
             DeprecationWarning,
@@ -401,8 +377,6 @@ def maximum_operation_per_hour(self):
     @maximum_operation_per_hour.setter
     def maximum_operation_per_hour(self, value):
         """DEPRECATED: Use 'maximum_per_hour' property instead."""
-        import warnings
-
         warnings.warn(
             "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.",
             DeprecationWarning,