From 6d9bccad2c297de372f22607d83620e4421a56eb Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Wed, 19 Nov 2025 14:05:02 +0100 Subject: [PATCH 01/49] Removing all deprectaed params and warnings --- flixopt/components.py | 95 +------ flixopt/core.py | 42 +--- flixopt/effects.py | 179 -------------- flixopt/elements.py | 67 ----- flixopt/interface.py | 185 +------------- flixopt/linear_converters.py | 464 +---------------------------------- test_deprecations.py | 318 ------------------------ 7 files changed, 4 insertions(+), 1346 deletions(-) delete mode 100644 test_deprecations.py diff --git a/flixopt/components.py b/flixopt/components.py index cf6cb4082..baba70af2 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -17,7 +17,7 @@ from .features import InvestmentModel, PiecewiseModel from .interface import InvestParameters, OnOffParameters, PiecewiseConversion from .modeling import BoundingPatterns -from .structure import DEPRECATION_REMOVAL_VERSION, FlowSystemModel, register_class_for_io +from .structure import FlowSystemModel, register_class_for_io if TYPE_CHECKING: import linopy @@ -414,14 +414,6 @@ def __init__( prevent_simultaneous_flows=[charging, discharging] if prevent_simultaneous_charge_and_discharge else None, meta_data=meta_data, ) - if isinstance(initial_charge_state, str) and initial_charge_state == 'lastValueOfSim': - warnings.warn( - f'{initial_charge_state=} is deprecated. Use "equals_final" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - initial_charge_state = 'equals_final' self.charging = charging self.discharging = discharging @@ -1095,22 +1087,7 @@ def __init__( outputs: list[Flow] | None = None, prevent_simultaneous_flow_rates: bool = True, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters using centralized helper - outputs = self._handle_deprecated_kwarg(kwargs, 'source', 'outputs', outputs, transform=lambda x: [x]) - inputs = self._handle_deprecated_kwarg(kwargs, 'sink', 'inputs', inputs, transform=lambda x: [x]) - prevent_simultaneous_flow_rates = self._handle_deprecated_kwarg( - kwargs, - 'prevent_simultaneous_sink_and_source', - 'prevent_simultaneous_flow_rates', - prevent_simultaneous_flow_rates, - check_conflict=False, - ) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - super().__init__( label, inputs=inputs, @@ -1120,36 +1097,6 @@ def __init__( ) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates - @property - def source(self) -> Flow: - warnings.warn( - 'The source property is deprecated. Use the outputs property instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.outputs[0] - - @property - def sink(self) -> Flow: - warnings.warn( - 'The sink property is deprecated. Use the inputs property instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.inputs[0] - - @property - def prevent_simultaneous_sink_and_source(self) -> bool: - warnings.warn( - 'The prevent_simultaneous_sink_and_source property is deprecated. Use the prevent_simultaneous_flow_rates property instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.prevent_simultaneous_flow_rates - @register_class_for_io class Source(Component): @@ -1233,14 +1180,7 @@ def __init__( outputs: list[Flow] | None = None, meta_data: dict | None = None, prevent_simultaneous_flow_rates: bool = False, - **kwargs, ): - # Handle deprecated parameter using centralized helper - outputs = self._handle_deprecated_kwarg(kwargs, 'source', 'outputs', outputs, transform=lambda x: [x]) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( label, @@ -1249,16 +1189,6 @@ def __init__( prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None, ) - @property - def source(self) -> Flow: - warnings.warn( - 'The source property is deprecated. Use the outputs property instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.outputs[0] - @register_class_for_io class Sink(Component): @@ -1343,29 +1273,16 @@ def __init__( inputs: list[Flow] | None = None, meta_data: dict | None = None, prevent_simultaneous_flow_rates: bool = False, - **kwargs, ): """Initialize a Sink (consumes flow from the system). - Supports legacy `sink=` keyword for backward compatibility (deprecated): if `sink` is provided - it is used as the single input flow and a DeprecationWarning is issued; specifying both - `inputs` and `sink` raises ValueError. - Args: label: Unique element label. inputs: Input flows for the sink. meta_data: Arbitrary metadata attached to the element. prevent_simultaneous_flow_rates: If True, prevents simultaneous nonzero flow rates across the element's inputs by wiring that restriction into the base Component setup. - - Note: - The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases. 
""" - # Handle deprecated parameter using centralized helper - inputs = self._handle_deprecated_kwarg(kwargs, 'sink', 'inputs', inputs, transform=lambda x: [x]) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( @@ -1374,13 +1291,3 @@ def __init__( meta_data=meta_data, prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None, ) - - @property - def sink(self) -> Flow: - warnings.warn( - 'The sink property is deprecated. Use the inputs property instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.inputs[0] diff --git a/flixopt/core.py b/flixopt/core.py index d41af7e2e..6c327ab72 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -18,7 +18,7 @@ """Possible dimensions of a FlowSystem.""" # Deprecation removal version - update this when planning the next major version -DEPRECATION_REMOVAL_VERSION = '5.0.0' +DEPRECATION_REMOVAL_VERSION = '6.0.0' class PlausibilityError(Exception): @@ -43,8 +43,6 @@ def __init__( *args: Any, aggregation_group: str | None = None, aggregation_weight: float | None = None, - agg_group: str | None = None, - agg_weight: float | None = None, **kwargs: Any, ): """ @@ -52,26 +50,8 @@ def __init__( *args: Arguments passed to DataArray aggregation_group: Aggregation group name aggregation_weight: Aggregation weight (0-1) - agg_group: Deprecated, use aggregation_group instead - agg_weight: Deprecated, use aggregation_weight instead **kwargs: Additional arguments passed to DataArray """ - if agg_group is not None: - warnings.warn( - f'agg_group is deprecated, use aggregation_group instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - aggregation_group = agg_group - if agg_weight is not None: - warnings.warn( - f'agg_weight is deprecated, use aggregation_weight instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - aggregation_weight = agg_weight if (aggregation_group is not None) and (aggregation_weight is not None): raise ValueError('Use either aggregation_group or aggregation_weight, not both') @@ -143,26 +123,6 @@ def __repr__(self): info_str = f'TimeSeriesData({", ".join(agg_info)})' if agg_info else 'TimeSeriesData' return f'{info_str}\n{super().__repr__()}' - @property - def agg_group(self): - warnings.warn( - f'agg_group is deprecated, use aggregation_group instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.aggregation_group - - @property - def agg_weight(self): - warnings.warn( - f'agg_weight is deprecated, use aggregation_weight instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.aggregation_weight - class DataConverter: """ diff --git a/flixopt/effects.py b/flixopt/effects.py index 02181920a..7c26dd0d6 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -202,7 +202,6 @@ def __init__( maximum_total: Numeric_PS | None = None, minimum_over_periods: Numeric_S | None = None, maximum_over_periods: Numeric_S | None = None, - **kwargs, ): super().__init__(label, meta_data=meta_data) self.unit = unit @@ -216,23 +215,6 @@ def __init__( self.share_from_temporal = share_from_temporal if share_from_temporal is not None else {} self.share_from_periodic = share_from_periodic if share_from_periodic is not None else {} - # Handle backwards compatibility for deprecated parameters using centralized helper - minimum_temporal = self._handle_deprecated_kwarg( - kwargs, 'minimum_operation', 'minimum_temporal', minimum_temporal - ) - maximum_temporal = self._handle_deprecated_kwarg( - kwargs, 'maximum_operation', 'maximum_temporal', maximum_temporal - ) - minimum_periodic = self._handle_deprecated_kwarg(kwargs, 'minimum_invest', 
'minimum_periodic', minimum_periodic) - maximum_periodic = self._handle_deprecated_kwarg(kwargs, 'maximum_invest', 'maximum_periodic', maximum_periodic) - minimum_per_hour = self._handle_deprecated_kwarg( - kwargs, 'minimum_operation_per_hour', 'minimum_per_hour', minimum_per_hour - ) - maximum_per_hour = self._handle_deprecated_kwarg( - kwargs, 'maximum_operation_per_hour', 'maximum_per_hour', maximum_per_hour - ) - self._validate_kwargs(kwargs) - # Set attributes directly self.minimum_temporal = minimum_temporal self.maximum_temporal = maximum_temporal @@ -245,167 +227,6 @@ def __init__( self.minimum_over_periods = minimum_over_periods self.maximum_over_periods = maximum_over_periods - # Backwards compatible properties (deprecated) - @property - def minimum_operation(self): - """DEPRECATED: Use 'minimum_temporal' property instead.""" - warnings.warn( - "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_temporal - - @minimum_operation.setter - def minimum_operation(self, value): - """DEPRECATED: Use 'minimum_temporal' property instead.""" - warnings.warn( - "Property 'minimum_operation' is deprecated. Use 'minimum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_temporal = value - - @property - def maximum_operation(self): - """DEPRECATED: Use 'maximum_temporal' property instead.""" - warnings.warn( - "Property 'maximum_operation' is deprecated. Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_temporal - - @maximum_operation.setter - def maximum_operation(self, value): - """DEPRECATED: Use 'maximum_temporal' property instead.""" - warnings.warn( - "Property 'maximum_operation' is deprecated. 
Use 'maximum_temporal' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_temporal = value - - @property - def minimum_invest(self): - """DEPRECATED: Use 'minimum_periodic' property instead.""" - warnings.warn( - "Property 'minimum_invest' is deprecated. Use 'minimum_periodic' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_periodic - - @minimum_invest.setter - def minimum_invest(self, value): - """DEPRECATED: Use 'minimum_periodic' property instead.""" - warnings.warn( - "Property 'minimum_invest' is deprecated. Use 'minimum_periodic' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_periodic = value - - @property - def maximum_invest(self): - """DEPRECATED: Use 'maximum_periodic' property instead.""" - warnings.warn( - "Property 'maximum_invest' is deprecated. Use 'maximum_periodic' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_periodic - - @maximum_invest.setter - def maximum_invest(self, value): - """DEPRECATED: Use 'maximum_periodic' property instead.""" - warnings.warn( - "Property 'maximum_invest' is deprecated. Use 'maximum_periodic' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_periodic = value - - @property - def minimum_operation_per_hour(self): - """DEPRECATED: Use 'minimum_per_hour' property instead.""" - warnings.warn( - "Property 'minimum_operation_per_hour' is deprecated. Use 'minimum_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_per_hour - - @minimum_operation_per_hour.setter - def minimum_operation_per_hour(self, value): - """DEPRECATED: Use 'minimum_per_hour' property instead.""" - warnings.warn( - "Property 'minimum_operation_per_hour' is deprecated. 
Use 'minimum_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_per_hour = value - - @property - def maximum_operation_per_hour(self): - """DEPRECATED: Use 'maximum_per_hour' property instead.""" - warnings.warn( - "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_per_hour - - @maximum_operation_per_hour.setter - def maximum_operation_per_hour(self, value): - """DEPRECATED: Use 'maximum_per_hour' property instead.""" - warnings.warn( - "Property 'maximum_operation_per_hour' is deprecated. Use 'maximum_per_hour' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_per_hour = value - - @property - def minimum_total_per_period(self): - """DEPRECATED: Use 'minimum_total' property instead.""" - warnings.warn( - "Property 'minimum_total_per_period' is deprecated. Use 'minimum_total' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.minimum_total - - @minimum_total_per_period.setter - def minimum_total_per_period(self, value): - """DEPRECATED: Use 'minimum_total' property instead.""" - warnings.warn( - "Property 'minimum_total_per_period' is deprecated. Use 'minimum_total' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.minimum_total = value - - @property - def maximum_total_per_period(self): - """DEPRECATED: Use 'maximum_total' property instead.""" - warnings.warn( - "Property 'maximum_total_per_period' is deprecated. Use 'maximum_total' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.maximum_total - - @maximum_total_per_period.setter - def maximum_total_per_period(self, value): - """DEPRECATED: Use 'maximum_total' property instead.""" - warnings.warn( - "Property 'maximum_total_per_period' is deprecated. 
Use 'maximum_total' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.maximum_total = value - def transform_data(self, name_prefix: str = '') -> None: prefix = '|'.join(filter(None, [name_prefix, self.label_full])) self.minimum_per_hour = self._fit_coords(f'{prefix}|minimum_per_hour', self.minimum_per_hour) diff --git a/flixopt/elements.py b/flixopt/elements.py index 2f63e8bdb..5c1a39690 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -18,7 +18,6 @@ from .interface import InvestParameters, OnOffParameters from .modeling import BoundingPatterns, ModelingPrimitives, ModelingUtilitiesAbstract from .structure import ( - DEPRECATION_REMOVAL_VERSION, Element, ElementModel, FlowSystemModel, @@ -468,7 +467,6 @@ def __init__( load_factor_max: Numeric_PS | None = None, previous_flow_rate: Scalar | list[Scalar] | None = None, meta_data: dict | None = None, - **kwargs, ): super().__init__(label, meta_data=meta_data) self.size = CONFIG.Modeling.big if size is None else size @@ -479,26 +477,6 @@ def __init__( self.load_factor_min = load_factor_min self.load_factor_max = load_factor_max - # Handle deprecated parameters - flow_hours_max = self._handle_deprecated_kwarg( - kwargs, 'flow_hours_per_period_max', 'flow_hours_max', flow_hours_max - ) - flow_hours_min = self._handle_deprecated_kwarg( - kwargs, 'flow_hours_per_period_min', 'flow_hours_min', flow_hours_min - ) - # Also handle the older deprecated names - flow_hours_max = self._handle_deprecated_kwarg(kwargs, 'flow_hours_total_max', 'flow_hours_max', flow_hours_max) - flow_hours_min = self._handle_deprecated_kwarg(kwargs, 'flow_hours_total_min', 'flow_hours_min', flow_hours_min) - flow_hours_max_over_periods = self._handle_deprecated_kwarg( - kwargs, 'total_flow_hours_max', 'flow_hours_max_over_periods', flow_hours_max_over_periods - ) - flow_hours_min_over_periods = self._handle_deprecated_kwarg( - kwargs, 'total_flow_hours_min', 'flow_hours_min_over_periods', flow_hours_min_over_periods - ) - - # 
Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - # self.positive_gradient = TimeSeries('positive_gradient', positive_gradient, self) self.effects_per_flow_hour = effects_per_flow_hour if effects_per_flow_hour is not None else {} self.flow_hours_max = flow_hours_max @@ -617,51 +595,6 @@ def size_is_fixed(self) -> bool: # Wenn kein InvestParameters existiert --> True; Wenn Investparameter, den Wert davon nehmen return False if (isinstance(self.size, InvestParameters) and self.size.fixed_size is None) else True - # Backwards compatible properties (deprecated) - @property - def flow_hours_total_max(self): - """DEPRECATED: Use 'flow_hours_max' property instead.""" - warnings.warn( - f"Property 'flow_hours_total_max' is deprecated. Use 'flow_hours_max' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.flow_hours_max - - @flow_hours_total_max.setter - def flow_hours_total_max(self, value): - """DEPRECATED: Use 'flow_hours_max' property instead.""" - warnings.warn( - f"Property 'flow_hours_total_max' is deprecated. Use 'flow_hours_max' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.flow_hours_max = value - - @property - def flow_hours_total_min(self): - """DEPRECATED: Use 'flow_hours_min' property instead.""" - warnings.warn( - f"Property 'flow_hours_total_min' is deprecated. Use 'flow_hours_min' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.flow_hours_min - - @flow_hours_total_min.setter - def flow_hours_total_min(self, value): - """DEPRECATED: Use 'flow_hours_min' property instead.""" - warnings.warn( - f"Property 'flow_hours_total_min' is deprecated. Use 'flow_hours_min' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.flow_hours_min = value - def _format_invest_params(self, params: InvestParameters) -> str: """Format InvestParameters for display.""" return f'size: {params.format_for_repr()}' diff --git a/flixopt/interface.py b/flixopt/interface.py index 55ac03b6b..ff27e48b6 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -14,7 +14,7 @@ from loguru import logger from .config import CONFIG -from .structure import DEPRECATION_REMOVAL_VERSION, Interface, register_class_for_io +from .structure import Interface, register_class_for_io if TYPE_CHECKING: # for type checking and preventing circular imports from collections.abc import Iterator @@ -730,18 +730,6 @@ class InvestParameters(Interface): linked_periods: Describes which periods are linked. 1 means linked, 0 means size=0. None means no linked periods. For convenience, pass a tuple containing the first and last period (2025, 2039), linking them and those in between - Deprecated Args: - fix_effects: **Deprecated**. Use `effects_of_investment` instead. - Will be removed in version 5.0.0. - specific_effects: **Deprecated**. Use `effects_of_investment_per_size` instead. - Will be removed in version 5.0.0. - divest_effects: **Deprecated**. Use `effects_of_retirement` instead. - Will be removed in version 5.0.0. - piecewise_effects: **Deprecated**. Use `piecewise_effects_of_investment` instead. - Will be removed in version 5.0.0. - optional: DEPRECATED. Use `mandatory` instead. Opposite of `mandatory`. - Will be removed in version 5.0.0. - Cost Annualization Requirements: All cost values must be properly weighted to match the optimization model's time horizon. For long-term investments, the cost values should be annualized to the corresponding operation time (annuity). 
@@ -898,36 +886,7 @@ def __init__( effects_of_retirement: Effect_PS | Numeric_PS | None = None, piecewise_effects_of_investment: PiecewiseEffects | None = None, linked_periods: Numeric_PS | tuple[int, int] | None = None, - **kwargs, ): - # Handle deprecated parameters using centralized helper - effects_of_investment = self._handle_deprecated_kwarg( - kwargs, 'fix_effects', 'effects_of_investment', effects_of_investment - ) - effects_of_investment_per_size = self._handle_deprecated_kwarg( - kwargs, 'specific_effects', 'effects_of_investment_per_size', effects_of_investment_per_size - ) - effects_of_retirement = self._handle_deprecated_kwarg( - kwargs, 'divest_effects', 'effects_of_retirement', effects_of_retirement - ) - piecewise_effects_of_investment = self._handle_deprecated_kwarg( - kwargs, 'piecewise_effects', 'piecewise_effects_of_investment', piecewise_effects_of_investment - ) - # For mandatory parameter with non-None default, disable conflict checking - if 'optional' in kwargs: - warnings.warn( - 'Deprecated parameter "optional" used. Check conflicts with new parameter "mandatory" manually! ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - mandatory = self._handle_deprecated_kwarg( - kwargs, 'optional', 'mandatory', mandatory, transform=lambda x: not x, check_conflict=False - ) - - # Validate any remaining unexpected kwargs - self._validate_kwargs(kwargs) - self.effects_of_investment = effects_of_investment if effects_of_investment is not None else {} self.effects_of_retirement = effects_of_retirement if effects_of_retirement is not None else {} self.fixed_size = fixed_size @@ -1005,74 +964,6 @@ def transform_data(self, name_prefix: str = '') -> None: ) self.fixed_size = self._fit_coords(f'{name_prefix}|fixed_size', self.fixed_size, dims=['period', 'scenario']) - @property - def optional(self) -> bool: - """DEPRECATED: Use 'mandatory' property instead. 
Returns the opposite of 'mandatory'.""" - import warnings - - warnings.warn( - f"Property 'optional' is deprecated. Use 'mandatory' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return not self.mandatory - - @optional.setter - def optional(self, value: bool): - """DEPRECATED: Use 'mandatory' property instead. Sets the opposite of the given value to 'mandatory'.""" - warnings.warn( - f"Property 'optional' is deprecated. Use 'mandatory' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.mandatory = not value - - @property - def fix_effects(self) -> Effect_PS | Numeric_PS: - """Deprecated property. Use effects_of_investment instead.""" - warnings.warn( - f'The fix_effects property is deprecated. Use effects_of_investment instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.effects_of_investment - - @property - def specific_effects(self) -> Effect_PS | Numeric_PS: - """Deprecated property. Use effects_of_investment_per_size instead.""" - warnings.warn( - f'The specific_effects property is deprecated. Use effects_of_investment_per_size instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.effects_of_investment_per_size - - @property - def divest_effects(self) -> Effect_PS | Numeric_PS: - """Deprecated property. Use effects_of_retirement instead.""" - warnings.warn( - f'The divest_effects property is deprecated. Use effects_of_retirement instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.effects_of_retirement - - @property - def piecewise_effects(self) -> PiecewiseEffects | None: - """Deprecated property. Use piecewise_effects_of_investment instead.""" - warnings.warn( - f'The piecewise_effects property is deprecated. 
Use piecewise_effects_of_investment instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.piecewise_effects_of_investment - @property def minimum_or_fixed_size(self) -> Numeric_PS: return self.fixed_size if self.fixed_size is not None else self.minimum_size @@ -1308,14 +1199,7 @@ def __init__( consecutive_off_hours_max: Numeric_TPS | None = None, switch_on_max: Numeric_PS | None = None, force_switch_on: bool = False, - **kwargs, ): - # Handle deprecated parameters - on_hours_min = self._handle_deprecated_kwarg(kwargs, 'on_hours_total_min', 'on_hours_min', on_hours_min) - on_hours_max = self._handle_deprecated_kwarg(kwargs, 'on_hours_total_max', 'on_hours_max', on_hours_max) - switch_on_max = self._handle_deprecated_kwarg(kwargs, 'switch_on_total_max', 'switch_on_max', switch_on_max) - self._validate_kwargs(kwargs) - self.effects_per_switch_on = effects_per_switch_on if effects_per_switch_on is not None else {} self.effects_per_running_hour = effects_per_running_hour if effects_per_running_hour is not None else {} self.on_hours_min = on_hours_min @@ -1388,70 +1272,3 @@ def use_switch_on(self) -> bool: self.switch_on_max, ] ) - - # Backwards compatible properties (deprecated) - @property - def on_hours_total_min(self): - """DEPRECATED: Use 'on_hours_min' property instead.""" - warnings.warn( - f"Property 'on_hours_total_min' is deprecated. Use 'on_hours_min' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.on_hours_min - - @on_hours_total_min.setter - def on_hours_total_min(self, value): - """DEPRECATED: Use 'on_hours_min' property instead.""" - warnings.warn( - f"Property 'on_hours_total_min' is deprecated. Use 'on_hours_min' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.on_hours_min = value - - @property - def on_hours_total_max(self): - """DEPRECATED: Use 'on_hours_max' property instead.""" - warnings.warn( - f"Property 'on_hours_total_max' is deprecated. Use 'on_hours_max' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.on_hours_max - - @on_hours_total_max.setter - def on_hours_total_max(self, value): - """DEPRECATED: Use 'on_hours_max' property instead.""" - warnings.warn( - f"Property 'on_hours_total_max' is deprecated. Use 'on_hours_max' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.on_hours_max = value - - @property - def switch_on_total_max(self): - """DEPRECATED: Use 'switch_on_max' property instead.""" - warnings.warn( - f"Property 'switch_on_total_max' is deprecated. Use 'switch_on_max' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.switch_on_max - - @switch_on_total_max.setter - def switch_on_total_max(self, value): - """DEPRECATED: Use 'switch_on_max' property instead.""" - warnings.warn( - f"Property 'switch_on_total_max' is deprecated. Use 'switch_on_max' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.switch_on_max = value diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 52c52463b..887d3518f 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -12,7 +12,7 @@ from .components import LinearConverter from .core import TimeSeriesData -from .structure import DEPRECATION_REMOVAL_VERSION, register_class_for_io +from .structure import register_class_for_io if TYPE_CHECKING: from .elements import Flow @@ -38,9 +38,6 @@ class Boiler(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - eta: *Deprecated*. Use `thermal_efficiency` instead. - Q_fu: *Deprecated*. Use `fuel_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. Examples: Natural gas boiler: @@ -84,14 +81,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - thermal_efficiency = self._handle_deprecated_kwarg(kwargs, 'eta', 'thermal_efficiency', thermal_efficiency) - self._validate_kwargs(kwargs) - # Validate required parameters if fuel_flow is None: raise ValueError(f"'{label}': fuel_flow is required and cannot be None") @@ -120,66 +110,6 @@ def thermal_efficiency(self, value): check_bounds(value, 'thermal_efficiency', self.label_full, 0, 1) self.conversion_factors = [{self.fuel_flow.label: value, self.thermal_flow.label: 1}] - @property - def eta(self) -> Numeric_TPS: - warnings.warn( - 'The "eta" property is deprecated. Use "thermal_efficiency" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_efficiency - - @eta.setter - def eta(self, value: Numeric_TPS) -> None: - warnings.warn( - 'The "eta" property is deprecated. Use "thermal_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_efficiency = value - - @property - def Q_fu(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_fu" property is deprecated. Use "fuel_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.fuel_flow - - @Q_fu.setter - def Q_fu(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_fu" property is deprecated. Use "fuel_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.fuel_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - @register_class_for_io class Power2Heat(LinearConverter): @@ -201,9 +131,6 @@ class Power2Heat(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - eta: *Deprecated*. Use `thermal_efficiency` instead. - P_el: *Deprecated*. Use `electrical_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Electric resistance heater: @@ -249,14 +176,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - electrical_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'electrical_flow', electrical_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - thermal_efficiency = self._handle_deprecated_kwarg(kwargs, 'eta', 'thermal_efficiency', thermal_efficiency) - self._validate_kwargs(kwargs) - # Validate required parameters if electrical_flow is None: raise ValueError(f"'{label}': electrical_flow is required and cannot be None") @@ -286,66 +206,6 @@ def thermal_efficiency(self, value): check_bounds(value, 'thermal_efficiency', self.label_full, 0, 1) self.conversion_factors = [{self.electrical_flow.label: value, self.thermal_flow.label: 1}] - @property - def eta(self) -> Numeric_TPS: - warnings.warn( - 'The "eta" property is deprecated. Use "thermal_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_efficiency - - @eta.setter - def eta(self, value: Numeric_TPS) -> None: - warnings.warn( - 'The "eta" property is deprecated. Use "thermal_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_efficiency = value - - @property - def P_el(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_flow - - @P_el.setter - def P_el(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - @register_class_for_io class HeatPump(LinearConverter): @@ -367,9 +227,6 @@ class HeatPump(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - COP: *Deprecated*. Use `cop` instead. - P_el: *Deprecated*. Use `electrical_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Air-source heat pump with constant COP: @@ -414,14 +271,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - electrical_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'electrical_flow', electrical_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) - self._validate_kwargs(kwargs) - # Validate required parameters if electrical_flow is None: raise ValueError(f"'{label}': electrical_flow is required and cannot be None") @@ -451,64 +301,6 @@ def cop(self, value): check_bounds(value, 'cop', self.label_full, 1, 20) self.conversion_factors = [{self.electrical_flow.label: value, self.thermal_flow.label: 1}] - @property - def COP(self) -> Numeric_TPS: # noqa: N802 - warnings.warn( - f'The "COP" property is deprecated. Use "cop" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.cop - - @COP.setter - def COP(self, value: Numeric_TPS) -> None: # noqa: N802 - warnings.warn( - f'The "COP" property is deprecated. Use "cop" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.cop = value - - @property - def P_el(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_flow - - @P_el.setter - def P_el(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - @register_class_for_io class CoolingTower(LinearConverter): @@ -530,8 +322,6 @@ class CoolingTower(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - P_el: *Deprecated*. Use `electrical_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Industrial cooling tower: @@ -578,13 +368,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - electrical_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'electrical_flow', electrical_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - self._validate_kwargs(kwargs) - # Validate required parameters if electrical_flow is None: raise ValueError(f"'{label}': electrical_flow is required and cannot be None") @@ -612,46 +396,6 @@ def specific_electricity_demand(self, value): check_bounds(value, 'specific_electricity_demand', self.label_full, 0, 1) self.conversion_factors = [{self.electrical_flow.label: -1, self.thermal_flow.label: value}] - @property - def P_el(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_flow - - @P_el.setter - def P_el(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - @register_class_for_io class CHP(LinearConverter): @@ -675,11 +419,6 @@ class CHP(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - eta_th: *Deprecated*. Use `thermal_efficiency` instead. - eta_el: *Deprecated*. Use `electrical_efficiency` instead. - Q_fu: *Deprecated*. Use `fuel_flow` instead. - P_el: *Deprecated*. Use `electrical_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. Examples: Natural gas CHP unit: @@ -733,18 +472,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) - electrical_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'electrical_flow', electrical_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - thermal_efficiency = self._handle_deprecated_kwarg(kwargs, 'eta_th', 'thermal_efficiency', thermal_efficiency) - electrical_efficiency = self._handle_deprecated_kwarg( - kwargs, 'eta_el', 'electrical_efficiency', electrical_efficiency - ) - self._validate_kwargs(kwargs) - # Validate required parameters if fuel_flow is None: raise ValueError(f"'{label}': fuel_flow is required and cannot be None") @@ -798,106 +526,6 @@ def electrical_efficiency(self, value): check_bounds(value, 'electrical_efficiency', self.label_full, 0, 1) self.conversion_factors[1] = {self.fuel_flow.label: value, self.electrical_flow.label: 1} - @property - def eta_th(self) -> Numeric_TPS: - warnings.warn( - 'The "eta_th" property is deprecated. Use "thermal_efficiency" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_efficiency - - @eta_th.setter - def eta_th(self, value: Numeric_TPS) -> None: - warnings.warn( - 'The "eta_th" property is deprecated. Use "thermal_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_efficiency = value - - @property - def eta_el(self) -> Numeric_TPS: - warnings.warn( - 'The "eta_el" property is deprecated. Use "electrical_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_efficiency - - @eta_el.setter - def eta_el(self, value: Numeric_TPS) -> None: - warnings.warn( - 'The "eta_el" property is deprecated. Use "electrical_efficiency" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_efficiency = value - - @property - def Q_fu(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_fu" property is deprecated. Use "fuel_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.fuel_flow - - @Q_fu.setter - def Q_fu(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_fu" property is deprecated. Use "fuel_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.fuel_flow = value - - @property - def P_el(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_flow - - @P_el.setter - def P_el(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - @register_class_for_io class HeatPumpWithSource(LinearConverter): @@ -921,10 +549,6 @@ class HeatPumpWithSource(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. - COP: *Deprecated*. Use `cop` instead. - P_el: *Deprecated*. Use `electrical_flow` instead. - Q_ab: *Deprecated*. Use `heat_source_flow` instead. - Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Ground-source heat pump with explicit ground coupling: @@ -978,15 +602,7 @@ def __init__( thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, - **kwargs, ): - # Handle deprecated parameters - electrical_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'electrical_flow', electrical_flow) - heat_source_flow = self._handle_deprecated_kwarg(kwargs, 'Q_ab', 'heat_source_flow', heat_source_flow) - thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) - cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) - self._validate_kwargs(kwargs) - # Validate required parameters if electrical_flow is None: raise ValueError(f"'{label}': electrical_flow is required and cannot be None") @@ -1023,84 +639,6 @@ def cop(self, value): {self.heat_source_flow.label: value / (value - 1), self.thermal_flow.label: 1}, ] - @property - def COP(self) -> Numeric_TPS: # noqa: N802 - warnings.warn( - f'The "COP" property is deprecated. Use "cop" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.cop - - @COP.setter - def COP(self, value: Numeric_TPS) -> None: # noqa: N802 - warnings.warn( - f'The "COP" property is deprecated. Use "cop" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.cop = value - - @property - def P_el(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.electrical_flow - - @P_el.setter - def P_el(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "P_el" property is deprecated. Use "electrical_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.electrical_flow = value - - @property - def Q_ab(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_ab" property is deprecated. Use "heat_source_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.heat_source_flow - - @Q_ab.setter - def Q_ab(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_ab" property is deprecated. Use "heat_source_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.heat_source_flow = value - - @property - def Q_th(self) -> Flow: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.thermal_flow - - @Q_th.setter - def Q_th(self, value: Flow) -> None: # noqa: N802 - warnings.warn( - 'The "Q_th" property is deprecated. Use "thermal_flow" instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.thermal_flow = value - def check_bounds( value: Numeric_TPS, diff --git a/test_deprecations.py b/test_deprecations.py deleted file mode 100644 index 6cd59b678..000000000 --- a/test_deprecations.py +++ /dev/null @@ -1,318 +0,0 @@ -"""Comprehensive pytest-based test for all deprecation warnings with v5.0.0 removal message.""" - -import warnings - -import pytest - -import flixopt as fx -from flixopt.core import DEPRECATION_REMOVAL_VERSION -from flixopt.linear_converters import CHP, Boiler, HeatPump, HeatPumpWithSource, Power2Heat - - -# === Parameter deprecations (via _handle_deprecated_kwarg) === -@pytest.mark.parametrize( - 'name,factory', - [ - ("Source 'source'", lambda: fx.Source('s1', source=fx.Flow('out1', bus='bus', size=10))), - ("Sink 'sink'", lambda: fx.Sink('sink1', sink=fx.Flow('in2', bus='bus', size=10))), - ("InvestParameters 'fix_effects'", lambda: fx.InvestParameters(minimum_size=10, fix_effects={'costs': 100})), - ( - "InvestParameters 'specific_effects'", - lambda: fx.InvestParameters(minimum_size=10, specific_effects={'costs': 10}), - ), - ( - "InvestParameters 'divest_effects'", - lambda: fx.InvestParameters(minimum_size=10, divest_effects={'costs': 50}), - ), - ( - "InvestParameters 'piecewise_effects'", - lambda: fx.InvestParameters(minimum_size=10, piecewise_effects=[]), - ), - ("InvestParameters 'optional'", lambda: fx.InvestParameters(minimum_size=10, optional=True)), - ("OnOffParameters 'on_hours_total_min'", lambda: fx.OnOffParameters(on_hours_total_min=10)), - ("OnOffParameters 'on_hours_total_max'", lambda: fx.OnOffParameters(on_hours_total_max=20)), - ("OnOffParameters 'switch_on_total_max'", lambda: fx.OnOffParameters(switch_on_total_max=5)), - ("Flow 'flow_hours_total_min'", lambda: fx.Flow('f1', bus='bus', size=10, flow_hours_total_min=5)), - ("Flow 'flow_hours_total_max'", lambda: fx.Flow('f2', bus='bus', size=10, 
flow_hours_total_max=20)), - ( - "Flow 'flow_hours_per_period_min'", - lambda: fx.Flow('f3', bus='bus', size=10, flow_hours_per_period_min=5), - ), - ( - "Flow 'flow_hours_per_period_max'", - lambda: fx.Flow('f4', bus='bus', size=10, flow_hours_per_period_max=20), - ), - ("Flow 'total_flow_hours_min'", lambda: fx.Flow('f5', bus='bus', size=10, total_flow_hours_min=5)), - ("Flow 'total_flow_hours_max'", lambda: fx.Flow('f6', bus='bus', size=10, total_flow_hours_max=20)), - ( - "Effect 'minimum_operation'", - lambda: fx.Effect('e1', unit='€', description='test', minimum_operation=100), - ), - ( - "Effect 'maximum_operation'", - lambda: fx.Effect('e2', unit='€', description='test', maximum_operation=200), - ), - ("Effect 'minimum_invest'", lambda: fx.Effect('e3', unit='€', description='test', minimum_invest=50)), - ("Effect 'maximum_invest'", lambda: fx.Effect('e4', unit='€', description='test', maximum_invest=150)), - ( - "Effect 'minimum_operation_per_hour'", - lambda: fx.Effect('e5', unit='€', description='test', minimum_operation_per_hour=10), - ), - ( - "Effect 'maximum_operation_per_hour'", - lambda: fx.Effect('e6', unit='€', description='test', maximum_operation_per_hour=30), - ), - # Linear converters - ( - "Boiler 'Q_fu'", - lambda: Boiler( - 'b1', Q_fu=fx.Flow('f1', 'bus', 10), thermal_flow=fx.Flow('h1', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Boiler 'Q_th'", - lambda: Boiler( - 'b2', fuel_flow=fx.Flow('f2', 'bus', 10), Q_th=fx.Flow('h2', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Boiler 'eta'", - lambda: Boiler('b3', fuel_flow=fx.Flow('f3', 'bus', 10), thermal_flow=fx.Flow('h3', 'bus', 9), eta=0.9), - ), - ( - "Power2Heat 'P_el'", - lambda: Power2Heat( - 'p1', P_el=fx.Flow('e1', 'bus', 10), thermal_flow=fx.Flow('h4', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Power2Heat 'Q_th'", - lambda: Power2Heat( - 'p2', electrical_flow=fx.Flow('e2', 'bus', 10), Q_th=fx.Flow('h5', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Power2Heat 
'eta'", - lambda: Power2Heat( - 'p3', electrical_flow=fx.Flow('e3', 'bus', 10), thermal_flow=fx.Flow('h6', 'bus', 9), eta=0.9 - ), - ), - ( - "HeatPump 'P_el'", - lambda: HeatPump('hp1', P_el=fx.Flow('e4', 'bus', 10), thermal_flow=fx.Flow('h7', 'bus', 30), cop=3.0), - ), - ( - "HeatPump 'Q_th'", - lambda: HeatPump('hp2', electrical_flow=fx.Flow('e5', 'bus', 10), Q_th=fx.Flow('h8', 'bus', 30), cop=3.0), - ), - ( - "HeatPump 'COP'", - lambda: HeatPump( - 'hp3', electrical_flow=fx.Flow('e6', 'bus', 10), thermal_flow=fx.Flow('h9', 'bus', 30), COP=3.0 - ), - ), - ( - "CHP 'Q_fu'", - lambda: CHP( - 'chp1', - Q_fu=fx.Flow('f4', 'bus', 100), - electrical_flow=fx.Flow('e7', 'bus', 30), - thermal_flow=fx.Flow('h10', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'P_el'", - lambda: CHP( - 'chp2', - fuel_flow=fx.Flow('f5', 'bus', 100), - P_el=fx.Flow('e8', 'bus', 30), - thermal_flow=fx.Flow('h11', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'Q_th'", - lambda: CHP( - 'chp3', - fuel_flow=fx.Flow('f6', 'bus', 100), - electrical_flow=fx.Flow('e9', 'bus', 30), - Q_th=fx.Flow('h12', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'eta_th'", - lambda: CHP( - 'chp4', - fuel_flow=fx.Flow('f7', 'bus', 100), - electrical_flow=fx.Flow('e10', 'bus', 30), - thermal_flow=fx.Flow('h13', 'bus', 60), - eta_th=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'eta_el'", - lambda: CHP( - 'chp5', - fuel_flow=fx.Flow('f8', 'bus', 100), - electrical_flow=fx.Flow('e11', 'bus', 30), - thermal_flow=fx.Flow('h14', 'bus', 60), - thermal_efficiency=0.6, - eta_el=0.3, - ), - ), - ( - "HeatPumpWithSource 'COP'", - lambda: HeatPumpWithSource( - 'hps1', - electrical_flow=fx.Flow('e12', 'bus', 10), - heat_source_flow=fx.Flow('hs1', 'bus', 20), - thermal_flow=fx.Flow('h15', 'bus', 30), - COP=3.0, - ), - ), - ( - "HeatPumpWithSource 'P_el'", - lambda: HeatPumpWithSource( - 'hps2', - 
P_el=fx.Flow('e13', 'bus', 10), - heat_source_flow=fx.Flow('hs2', 'bus', 20), - thermal_flow=fx.Flow('h16', 'bus', 30), - cop=3.0, - ), - ), - ( - "HeatPumpWithSource 'Q_ab'", - lambda: HeatPumpWithSource( - 'hps3', - electrical_flow=fx.Flow('e14', 'bus', 10), - Q_ab=fx.Flow('hs3', 'bus', 20), - thermal_flow=fx.Flow('h17', 'bus', 30), - cop=3.0, - ), - ), - ( - "HeatPumpWithSource 'Q_th'", - lambda: HeatPumpWithSource( - 'hps4', - electrical_flow=fx.Flow('e15', 'bus', 10), - heat_source_flow=fx.Flow('hs4', 'bus', 20), - Q_th=fx.Flow('h18', 'bus', 30), - cop=3.0, - ), - ), - ], - ids=lambda x: x if isinstance(x, str) else '', -) -def test_parameter_deprecations(name, factory): - """Test all parameter deprecations include removal version message.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - factory() - assert len(w) > 0, f'No warning raised for {name}' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( - f'Missing removal version in {name}' - ) - - -# === Property deprecations === -@pytest.fixture(scope='module') -def deprecated_instances(): - """Create instances for property testing.""" - return { - 'data': fx.TimeSeriesData([1, 2, 3], aggregation_group=1), - 'boiler': Boiler( - 'b_prop', fuel_flow=fx.Flow('f_p', 'bus', 10), thermal_flow=fx.Flow('h_p', 'bus', 9), thermal_efficiency=0.9 - ), - 'invest_with_effects': fx.InvestParameters( - minimum_size=10, - maximum_size=100, - mandatory=False, - effects_of_investment={'costs': 100}, - effects_of_investment_per_size={'costs': 10}, - effects_of_retirement={'costs': 50}, - piecewise_effects_of_investment=None, - ), - 'invest': fx.InvestParameters(minimum_size=10, maximum_size=100, mandatory=False), - 'onoff': fx.OnOffParameters( - on_hours_min=5, - on_hours_max=10, - switch_on_max=3, - ), - 'flow': fx.Flow('f_prop', bus='bus', size=10, flow_hours_min=5, flow_hours_max=20), - 'chp': CHP( - 'chp_prop', - 
fuel_flow=fx.Flow('f_chp', 'bus', 100), - electrical_flow=fx.Flow('e_chp', 'bus', 30), - thermal_flow=fx.Flow('h_chp', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - 'hp': HeatPump( - 'hp_prop', electrical_flow=fx.Flow('e_hp', 'bus', 10), thermal_flow=fx.Flow('h_hp', 'bus', 30), cop=3.0 - ), - 'hps': HeatPumpWithSource( - 'hps_prop', - electrical_flow=fx.Flow('e_hps', 'bus', 10), - heat_source_flow=fx.Flow('hs_hps', 'bus', 20), - thermal_flow=fx.Flow('h_hps', 'bus', 30), - cop=3.0, - ), - } - - -@pytest.mark.parametrize( - 'name,accessor', - [ - # TimeSeriesData properties - ('TimeSeriesData.agg_group', lambda objs: objs['data'].agg_group), - ('TimeSeriesData.agg_weight', lambda objs: objs['data'].agg_weight), - # InvestParameters properties - ('InvestParameters.optional', lambda objs: objs['invest'].optional), - ('InvestParameters.fix_effects', lambda objs: objs['invest_with_effects'].fix_effects), - ('InvestParameters.specific_effects', lambda objs: objs['invest_with_effects'].specific_effects), - ('InvestParameters.divest_effects', lambda objs: objs['invest_with_effects'].divest_effects), - ('InvestParameters.piecewise_effects', lambda objs: objs['invest_with_effects'].piecewise_effects), - # OnOffParameters properties - ('OnOffParameters.on_hours_total_min', lambda objs: objs['onoff'].on_hours_total_min), - ('OnOffParameters.on_hours_total_max', lambda objs: objs['onoff'].on_hours_total_max), - ('OnOffParameters.switch_on_total_max', lambda objs: objs['onoff'].switch_on_total_max), - # Flow properties - ('Flow.flow_hours_total_min', lambda objs: objs['flow'].flow_hours_total_min), - ('Flow.flow_hours_total_max', lambda objs: objs['flow'].flow_hours_total_max), - # Boiler properties - ('Boiler.eta', lambda objs: objs['boiler'].eta), - ('Boiler.Q_fu', lambda objs: objs['boiler'].Q_fu), - ('Boiler.Q_th', lambda objs: objs['boiler'].Q_th), - # CHP properties - ('CHP.eta_th', lambda objs: objs['chp'].eta_th), - ('CHP.eta_el', lambda objs: 
objs['chp'].eta_el), - ('CHP.Q_fu', lambda objs: objs['chp'].Q_fu), - ('CHP.P_el', lambda objs: objs['chp'].P_el), - ('CHP.Q_th', lambda objs: objs['chp'].Q_th), - # HeatPump properties - ('HeatPump.COP', lambda objs: objs['hp'].COP), - ('HeatPump.P_el', lambda objs: objs['hp'].P_el), - ('HeatPump.Q_th', lambda objs: objs['hp'].Q_th), - # HeatPumpWithSource properties - ('HeatPumpWithSource.COP', lambda objs: objs['hps'].COP), - ('HeatPumpWithSource.P_el', lambda objs: objs['hps'].P_el), - ('HeatPumpWithSource.Q_ab', lambda objs: objs['hps'].Q_ab), - ('HeatPumpWithSource.Q_th', lambda objs: objs['hps'].Q_th), - ], - ids=lambda x: x if isinstance(x, str) else '', -) -def test_property_deprecations(name, accessor, deprecated_instances): - """Test all property deprecations include removal version message.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - accessor(deprecated_instances) - assert len(w) > 0, f'No warning raised for {name}' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( - f'Missing removal version in {name}' - ) From d4a2e7452f2dcad9d292b7c08256e6e3c19c5d9d Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Wed, 19 Nov 2025 14:11:13 +0100 Subject: [PATCH 02/49] Update tests --- tests/test_flow_system_resample.py | 2 +- tests/test_invest_parameters_deprecation.py | 344 -------------------- 2 files changed, 1 insertion(+), 345 deletions(-) delete mode 100644 tests/test_invest_parameters_deprecation.py diff --git a/tests/test_flow_system_resample.py b/tests/test_flow_system_resample.py index 8946dd02f..551fcf483 100644 --- a/tests/test_flow_system_resample.py +++ b/tests/test_flow_system_resample.py @@ -172,7 +172,7 @@ def test_converter_resample(complex_fs): fs_r = complex_fs.resample('4h', method='mean') assert 'boiler' in fs_r.components boiler = fs_r.components['boiler'] - assert hasattr(boiler, 'eta') + assert 
hasattr(boiler, 'thermal_efficiency') def test_invest_resample(complex_fs): diff --git a/tests/test_invest_parameters_deprecation.py b/tests/test_invest_parameters_deprecation.py deleted file mode 100644 index 438d7f4b8..000000000 --- a/tests/test_invest_parameters_deprecation.py +++ /dev/null @@ -1,344 +0,0 @@ -""" -Test backward compatibility and deprecation warnings for InvestParameters. - -This test verifies that: -1. Old parameter names (fix_effects, specific_effects, divest_effects, piecewise_effects) still work with warnings -2. New parameter names (effects_of_investment, effects_of_investment_per_size, effects_of_retirement, piecewise_effects_of_investment) work correctly -3. Both old and new approaches produce equivalent results -""" - -import warnings - -import pytest - -from flixopt.interface import InvestParameters - - -class TestInvestParametersDeprecation: - """Test suite for InvestParameters parameter deprecation.""" - - def test_new_parameters_no_warnings(self): - """Test that new parameter names don't trigger warnings.""" - with warnings.catch_warnings(): - warnings.simplefilter('error', DeprecationWarning) - # Should not raise DeprecationWarning - params = InvestParameters( - fixed_size=100, - effects_of_investment={'cost': 25000}, - effects_of_investment_per_size={'cost': 1200}, - effects_of_retirement={'cost': 5000}, - ) - assert params.effects_of_investment == {'cost': 25000} - assert params.effects_of_investment_per_size == {'cost': 1200} - assert params.effects_of_retirement == {'cost': 5000} - - def test_old_fix_effects_deprecation_warning(self): - """Test that fix_effects triggers deprecation warning.""" - with pytest.warns(DeprecationWarning, match='fix_effects.*deprecated.*effects_of_investment'): - params = InvestParameters(fix_effects={'cost': 25000}) - # Verify backward compatibility - assert params.effects_of_investment == {'cost': 25000} - - # Accessing the property also triggers warning - with pytest.warns(DeprecationWarning, 
match='fix_effects.*deprecated.*effects_of_investment'): - assert params.fix_effects == {'cost': 25000} - - def test_old_specific_effects_deprecation_warning(self): - """Test that specific_effects triggers deprecation warning.""" - with pytest.warns(DeprecationWarning, match='specific_effects.*deprecated.*effects_of_investment_per_size'): - params = InvestParameters(specific_effects={'cost': 1200}) - # Verify backward compatibility - assert params.effects_of_investment_per_size == {'cost': 1200} - - # Accessing the property also triggers warning - with pytest.warns(DeprecationWarning, match='specific_effects.*deprecated.*effects_of_investment_per_size'): - assert params.specific_effects == {'cost': 1200} - - def test_old_divest_effects_deprecation_warning(self): - """Test that divest_effects triggers deprecation warning.""" - with pytest.warns(DeprecationWarning, match='divest_effects.*deprecated.*effects_of_retirement'): - params = InvestParameters(divest_effects={'cost': 5000}) - # Verify backward compatibility - assert params.effects_of_retirement == {'cost': 5000} - - # Accessing the property also triggers warning - with pytest.warns(DeprecationWarning, match='divest_effects.*deprecated.*effects_of_retirement'): - assert params.divest_effects == {'cost': 5000} - - def test_old_piecewise_effects_deprecation_warning(self): - """Test that piecewise_effects triggers deprecation warning.""" - from flixopt.interface import Piece, Piecewise, PiecewiseEffects - - test_piecewise = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 100)]), - piecewise_shares={'cost': Piecewise([Piece(800, 600)])}, - ) - with pytest.warns(DeprecationWarning, match='piecewise_effects.*deprecated.*piecewise_effects_of_investment'): - params = InvestParameters(piecewise_effects=test_piecewise) - # Verify backward compatibility - assert params.piecewise_effects_of_investment is test_piecewise - - # Accessing the property also triggers warning - with pytest.warns(DeprecationWarning, 
match='piecewise_effects.*deprecated.*piecewise_effects_of_investment'): - assert params.piecewise_effects is test_piecewise - - def test_all_old_parameters_together(self): - """Test all old parameters work together with warnings.""" - from flixopt.interface import Piece, Piecewise, PiecewiseEffects - - test_piecewise = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 100)]), - piecewise_shares={'cost': Piecewise([Piece(800, 600)])}, - ) - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - params = InvestParameters( - fixed_size=100, - fix_effects={'cost': 25000}, - specific_effects={'cost': 1200}, - divest_effects={'cost': 5000}, - piecewise_effects=test_piecewise, - ) - - # Should trigger 4 deprecation warnings (from kwargs) - assert len([warning for warning in w if issubclass(warning.category, DeprecationWarning)]) == 4 - - # Verify all mappings work (accessing new properties - no warnings) - assert params.effects_of_investment == {'cost': 25000} - assert params.effects_of_investment_per_size == {'cost': 1200} - assert params.effects_of_retirement == {'cost': 5000} - assert params.piecewise_effects_of_investment is test_piecewise - - # Verify old attributes still work (accessing deprecated properties - triggers warnings) - with pytest.warns(DeprecationWarning): - assert params.fix_effects == {'cost': 25000} - with pytest.warns(DeprecationWarning): - assert params.specific_effects == {'cost': 1200} - with pytest.warns(DeprecationWarning): - assert params.divest_effects == {'cost': 5000} - with pytest.warns(DeprecationWarning): - assert params.piecewise_effects is test_piecewise - - def test_both_old_and_new_raises_error(self): - """Test that specifying both old and new parameter names raises ValueError.""" - # fix_effects + effects_of_investment - with pytest.raises( - ValueError, match='Either fix_effects or effects_of_investment can be specified, but not both' - ): - InvestParameters( - 
fix_effects={'cost': 10000}, - effects_of_investment={'cost': 25000}, - ) - - # specific_effects + effects_of_investment_per_size - with pytest.raises( - ValueError, - match='Either specific_effects or effects_of_investment_per_size can be specified, but not both', - ): - InvestParameters( - specific_effects={'cost': 1200}, - effects_of_investment_per_size={'cost': 1500}, - ) - - # divest_effects + effects_of_retirement - with pytest.raises( - ValueError, match='Either divest_effects or effects_of_retirement can be specified, but not both' - ): - InvestParameters( - divest_effects={'cost': 5000}, - effects_of_retirement={'cost': 6000}, - ) - - # piecewise_effects + piecewise_effects_of_investment - from flixopt.interface import Piece, Piecewise, PiecewiseEffects - - test_piecewise1 = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 100)]), - piecewise_shares={'cost': Piecewise([Piece(800, 600)])}, - ) - test_piecewise2 = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 200)]), - piecewise_shares={'cost': Piecewise([Piece(900, 700)])}, - ) - with pytest.raises( - ValueError, - match='Either piecewise_effects or piecewise_effects_of_investment can be specified, but not both', - ): - InvestParameters( - piecewise_effects=test_piecewise1, - piecewise_effects_of_investment=test_piecewise2, - ) - - def test_piecewise_effects_of_investment_new_parameter(self): - """Test that piecewise_effects_of_investment works correctly.""" - from flixopt.interface import Piece, Piecewise, PiecewiseEffects - - test_piecewise = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 100)]), - piecewise_shares={'cost': Piecewise([Piece(800, 600)])}, - ) - - with warnings.catch_warnings(): - warnings.simplefilter('error', DeprecationWarning) - # Should not raise DeprecationWarning when using new parameter - params = InvestParameters(piecewise_effects_of_investment=test_piecewise) - assert params.piecewise_effects_of_investment is test_piecewise - - # Accessing deprecated 
property triggers warning - with pytest.warns(DeprecationWarning): - assert params.piecewise_effects is test_piecewise - - def test_backward_compatibility_with_features(self): - """Test that old attribute names remain accessible for features.py compatibility.""" - from flixopt.interface import Piece, Piecewise, PiecewiseEffects - - test_piecewise = PiecewiseEffects( - piecewise_origin=Piecewise([Piece(0, 100)]), - piecewise_shares={'cost': Piecewise([Piece(800, 600)])}, - ) - - params = InvestParameters( - effects_of_investment={'cost': 25000}, - effects_of_investment_per_size={'cost': 1200}, - effects_of_retirement={'cost': 5000}, - piecewise_effects_of_investment=test_piecewise, - ) - - # Old properties should still be accessible (for features.py) but with warnings - with pytest.warns(DeprecationWarning): - assert params.fix_effects == {'cost': 25000} - with pytest.warns(DeprecationWarning): - assert params.specific_effects == {'cost': 1200} - with pytest.warns(DeprecationWarning): - assert params.divest_effects == {'cost': 5000} - with pytest.warns(DeprecationWarning): - assert params.piecewise_effects is test_piecewise - - # Properties should return the same objects as the new attributes - with pytest.warns(DeprecationWarning): - assert params.fix_effects is params.effects_of_investment - with pytest.warns(DeprecationWarning): - assert params.specific_effects is params.effects_of_investment_per_size - with pytest.warns(DeprecationWarning): - assert params.divest_effects is params.effects_of_retirement - with pytest.warns(DeprecationWarning): - assert params.piecewise_effects is params.piecewise_effects_of_investment - - def test_empty_parameters(self): - """Test that empty/None parameters work correctly.""" - params = InvestParameters() - - assert params.effects_of_investment == {} - assert params.effects_of_investment_per_size == {} - assert params.effects_of_retirement == {} - assert params.piecewise_effects_of_investment is None - - # Old properties should 
also be empty (but with warnings) - with pytest.warns(DeprecationWarning): - assert params.fix_effects == {} - with pytest.warns(DeprecationWarning): - assert params.specific_effects == {} - with pytest.warns(DeprecationWarning): - assert params.divest_effects == {} - with pytest.warns(DeprecationWarning): - assert params.piecewise_effects is None - - def test_mixed_old_and_new_parameters(self): - """Test mixing old and new parameter names (not recommended but should work).""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - params = InvestParameters( - effects_of_investment={'cost': 25000}, # New - specific_effects={'cost': 1200}, # Old - effects_of_retirement={'cost': 5000}, # New - ) - - # Should only warn about the old parameter - assert len([warning for warning in w if issubclass(warning.category, DeprecationWarning)]) == 1 - - # All should work correctly - assert params.effects_of_investment == {'cost': 25000} - assert params.effects_of_investment_per_size == {'cost': 1200} - assert params.effects_of_retirement == {'cost': 5000} - - def test_unexpected_keyword_arguments(self): - """Test that unexpected keyword arguments raise TypeError.""" - # Single unexpected argument - with pytest.raises( - TypeError, match="InvestParameters.__init__\\(\\) got unexpected keyword argument\\(s\\): 'invalid_param'" - ): - InvestParameters(invalid_param='value') - - # Multiple unexpected arguments - with pytest.raises( - TypeError, - match="InvestParameters.__init__\\(\\) got unexpected keyword argument\\(s\\): 'param1', 'param2'", - ): - InvestParameters(param1='value1', param2='value2') - - # Mix of valid and invalid arguments - with pytest.raises( - TypeError, match="InvestParameters.__init__\\(\\) got unexpected keyword argument\\(s\\): 'typo'" - ): - InvestParameters(effects_of_investment={'cost': 100}, typo='value') - - def test_optional_parameter_deprecation(self): - """Test that optional parameter triggers 
deprecation warning and maps to mandatory.""" - # Test optional=True (should map to mandatory=False) - with pytest.warns(DeprecationWarning, match='optional.*deprecated.*mandatory'): - params = InvestParameters(optional=True) - assert params.mandatory is False - - # Test optional=False (should map to mandatory=True) - with pytest.warns(DeprecationWarning, match='optional.*deprecated.*mandatory'): - params = InvestParameters(optional=False) - assert params.mandatory is True - - def test_mandatory_parameter_no_warning(self): - """Test that mandatory parameter doesn't trigger warnings.""" - with warnings.catch_warnings(): - warnings.simplefilter('error', DeprecationWarning) - # Test mandatory=True - params = InvestParameters(mandatory=True) - assert params.mandatory is True - - # Test mandatory=False (explicit) - params = InvestParameters(mandatory=False) - assert params.mandatory is False - - def test_mandatory_default_value(self): - """Test that default value of mandatory is False when neither optional nor mandatory is specified.""" - params = InvestParameters() - assert params.mandatory is False - - def test_both_optional_and_mandatory_no_error(self): - """Test that specifying both optional and mandatory doesn't raise error. - - Note: Conflict checking is disabled for mandatory/optional because mandatory has - a non-None default value (False), making it impossible to distinguish between - an explicit mandatory=False and the default value. The deprecated optional - parameter will take precedence when both are specified. 
- """ - # When both are specified, optional takes precedence (with deprecation warning) - with pytest.warns(DeprecationWarning, match='optional.*deprecated.*mandatory'): - params = InvestParameters(optional=True, mandatory=False) - # optional=True should result in mandatory=False - assert params.mandatory is False - - with pytest.warns(DeprecationWarning, match='optional.*deprecated.*mandatory'): - params = InvestParameters(optional=False, mandatory=True) - # optional=False should result in mandatory=True (optional takes precedence) - assert params.mandatory is True - - def test_optional_property_deprecation(self): - """Test that accessing optional property triggers deprecation warning.""" - params = InvestParameters(mandatory=True) - - # Reading the property triggers warning - with pytest.warns(DeprecationWarning, match="Property 'optional' is deprecated"): - assert params.optional is False - - # Setting the property triggers warning - with pytest.warns(DeprecationWarning, match="Property 'optional' is deprecated"): - params.optional = True - assert params.mandatory is False From 2c6d923d22f6e70cffc73c28e82d6b9fb88f5412 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Wed, 19 Nov 2025 14:23:32 +0100 Subject: [PATCH 03/49] Update CHANGELOG.md to mention removed params --- CHANGELOG.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa5c445b4..603e162fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -65,6 +65,45 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ### 🔥 Removed +**Deprecated parameters removed** (all were deprecated in v4.0.0 or earlier): + +**TimeSeriesData:** +- `agg_group` → use `aggregation_group` +- `agg_weight` → use `aggregation_weight` +- Properties: `agg_group`, `agg_weight` + +**Effect:** +- Constructor parameters: `minimum_operation` → use `minimum_temporal`, `maximum_operation` → use 
`maximum_temporal`, `minimum_invest` → use `minimum_periodic`, `maximum_invest` → use `maximum_periodic`, `minimum_operation_per_hour` → use `minimum_per_hour`, `maximum_operation_per_hour` → use `maximum_per_hour` +- Properties: `minimum_operation`, `maximum_operation`, `minimum_invest`, `maximum_invest`, `minimum_operation_per_hour`, `maximum_operation_per_hour`, `minimum_total_per_period`, `maximum_total_per_period` + +**Flow:** +- Constructor parameters: `flow_hours_per_period_max` → use `flow_hours_max`, `flow_hours_per_period_min` → use `flow_hours_min`, `flow_hours_total_max` → use `flow_hours_max`, `flow_hours_total_min` → use `flow_hours_min`, `total_flow_hours_max` → use `flow_hours_max_over_periods`, `total_flow_hours_min` → use `flow_hours_min_over_periods` +- Properties: `flow_hours_total_max`, `flow_hours_total_min` + +**InvestParameters:** +- Constructor parameters: `fix_effects` → use `effects_of_investment`, `specific_effects` → use `effects_of_investment_per_size`, `divest_effects` → use `effects_of_retirement`, `piecewise_effects` → use `piecewise_effects_of_investment`, `optional` → use `mandatory` (with inverted logic) +- Properties: `optional`, `fix_effects`, `specific_effects`, `divest_effects`, `piecewise_effects` + +**OnOffParameters:** +- Constructor parameters: `on_hours_total_min` → use `on_hours_min`, `on_hours_total_max` → use `on_hours_max`, `switch_on_total_max` → use `switch_on_max` + +**Storage:** +- `initial_charge_state="lastValueOfSim"` → use `initial_charge_state="equals_final"` + +**Source, Sink, SourceAndSink:** +- Constructor parameters: + - Source: `source` → use `outputs` + - Sink: `sink` → use `inputs` + - SourceAndSink: `source` → use `outputs`, `sink` → use `inputs`, `prevent_simultaneous_sink_and_source` → use `prevent_simultaneous_flow_rates` +- Properties: + - Source: `source` property + - Sink: `sink` property + - SourceAndSink: `source`, `sink`, `prevent_simultaneous_sink_and_source` properties + +**Linear 
Converters** (Boiler, CHP, HeatPump, etc.): +- Flow parameters: `Q_fu` → use `fuel_flow`, `P_el` → use `electrical_flow`, `Q_th` → use `thermal_flow`, `Q_ab` → use `heat_source_flow` +- Efficiency parameters: `eta` → use `thermal_efficiency`, `eta_th` → use `thermal_efficiency`, `eta_el` → use `electrical_efficiency`, `COP` → use `cop` + ### 🐛 Fixed ### 🔒 Security From a8612b014a9325b1d008e8fd01895e35fd5ca79a Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 01:58:29 +0100 Subject: [PATCH 04/49] Merge branch 'main' into feature/v5 # Conflicts: # flixopt/core.py # flixopt/effects.py # test_deprecations.py # tests/test_invest_parameters_deprecation.py --- .github/workflows/python-app.yaml | 417 -------- .github/workflows/release.yaml | 224 ++++ .github/workflows/test.yaml | 116 ++ CITATION.cff | 71 ++ README.md | 29 +- ...tion Modes.md => 03-Optimization Modes.md} | 4 +- docs/examples/index.md | 2 +- docs/getting-started.md | 2 +- docs/index.md | 4 +- docs/stylesheets/extra.css | 24 +- docs/user-guide/core-concepts.md | 30 +- .../mathematical-notation/dimensions.md | 9 +- .../effects-penalty-objective.md | 99 +- docs/user-guide/migration-guide-v3.md | 12 +- examples/00_Minmal/minimal_example.py | 4 +- examples/01_Simple/simple_example.py | 20 +- examples/02_Complex/complex_example.py | 18 +- .../02_Complex/complex_example_results.py | 2 +- .../example_optimization_modes.py} | 74 +- examples/04_Scenarios/scenario_example.py | 22 +- .../two_stage_optimization.py | 6 +- flixopt/__init__.py | 20 +- flixopt/calculation.py | 719 ++----------- flixopt/{aggregation.py => clustering.py} | 91 +- flixopt/components.py | 3 - flixopt/config.py | 260 +++-- flixopt/elements.py | 24 +- flixopt/flow_system.py | 23 +- flixopt/interface.py | 8 +- flixopt/io.py | 28 +- flixopt/linear_converters.py | 3 - flixopt/modeling.py | 2 +- flixopt/network_app.py | 3 +- flixopt/optimization.py | 989 ++++++++++++++++++ flixopt/plotting.py | 2 
- flixopt/results.py | 404 ++++--- mkdocs.yml | 6 +- pyproject.toml | 5 +- scripts/extract_changelog.py | 1 - tests/conftest.py | 34 +- tests/test_bus.py | 20 +- tests/test_component.py | 12 +- tests/test_config.py | 64 +- tests/test_dataconverter.py | 2 +- tests/test_deprecations.py | 613 +++++++++++ tests/test_effect.py | 29 +- tests/test_flow_system_resample.py | 6 +- tests/test_functional.py | 10 +- tests/test_integration.py | 67 +- tests/test_io.py | 10 +- tests/test_overwrite_protection.py | 64 ++ tests/test_results_plots.py | 10 +- tests/test_scenarios.py | 55 +- 53 files changed, 3178 insertions(+), 1598 deletions(-) delete mode 100644 .github/workflows/python-app.yaml create mode 100644 .github/workflows/release.yaml create mode 100644 .github/workflows/test.yaml create mode 100644 CITATION.cff rename docs/examples/{03-Calculation Modes.md => 03-Optimization Modes.md} (56%) rename examples/{03_Calculation_types/example_calculation_types.py => 03_Optimization_modes/example_optimization_modes.py} (74%) rename flixopt/{aggregation.py => clustering.py} (83%) create mode 100644 flixopt/optimization.py create mode 100644 tests/test_deprecations.py create mode 100644 tests/test_overwrite_protection.py diff --git a/.github/workflows/python-app.yaml b/.github/workflows/python-app.yaml deleted file mode 100644 index 009c85efa..000000000 --- a/.github/workflows/python-app.yaml +++ /dev/null @@ -1,417 +0,0 @@ -name: Python Package CI/CD - -on: - push: - branches: [main] # Only main branch - tags: ['v*.*.*'] - pull_request: - branches: [main, 'dev*', 'dev/**', 'feature/**'] - types: [opened, synchronize, reopened] - paths-ignore: - - 'docs/**' - - '*.md' - - 'README*' - workflow_dispatch: # Allow manual triggering - -# Set permissions for security -permissions: - contents: read - -# Cancel previous runs on new push -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - PYTHON_VERSION: "3.11" - MPLBACKEND: Agg # 
Non-interactive matplotlib backend for CI/testing - PLOTLY_RENDERER: json # Headless plotly renderer for CI/testing - FLIXOPT_CI: false # Disable interactive plotting for CI/testing - -jobs: - lint: - runs-on: ubuntu-24.04 - steps: - - name: Check out code - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Ruff - run: | - uvx ruff --version - - - name: Run Ruff Linting - run: | - echo "::group::Ruff Linting" - uvx ruff check . --output-format=github - echo "::endgroup::" - - - name: Run Ruff Formatting Check - run: | - echo "::group::Ruff Formatting" - uvx ruff format --check --diff . - echo "::endgroup::" - - test: - runs-on: ubuntu-24.04 - timeout-minutes: 30 - needs: lint # Run tests only after linting passes - strategy: - fail-fast: false # Continue testing other Python versions if one fails - matrix: - python-version: ['3.10', '3.11', '3.12', '3.13'] - - steps: - - name: Check out code - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies - run: | - uv pip install --system .[dev] - - - name: Run tests - run: pytest -v --numprocesses=auto - - test-examples: - runs-on: ubuntu-24.04 - timeout-minutes: 45 - needs: lint - # Only run examples on releases (tags) - if: startsWith(github.ref, 'refs/tags/v') || (github.event_name == 'push' && github.ref == 'refs/heads/main') - - steps: - - name: Check out code - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@v6 - 
with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install dependencies - run: | - uv pip install --system .[dev] - - - name: Run example tests - run: pytest -v -m examples --numprocesses=auto - - security: - name: Security Scan - runs-on: ubuntu-24.04 - needs: lint - steps: - - name: Check out code - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Run Bandit security scan - run: | - # Gate on HIGH severity & MEDIUM confidence; produce JSON artifact - uvx bandit -r flixopt/ -c pyproject.toml -f json -o bandit-report.json -q --severity-level high --confidence-level medium - # Human-readable output without affecting job status - uvx bandit -r flixopt/ -c pyproject.toml -q --exit-zero - - - name: Upload security reports - uses: actions/upload-artifact@v4 - if: always() - with: - name: security-report - path: bandit-report.json - retention-days: 30 - - create-release: - name: Create GitHub Release - runs-on: ubuntu-24.04 - permissions: - contents: write - needs: [lint, test, test-examples, security] - if: startsWith(github.ref, 'refs/tags/v') - - steps: - - name: Checkout repository - uses: actions/checkout@v5 - with: - fetch-depth: 0 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Extract release notes - run: | - VERSION=${GITHUB_REF#refs/tags/v} - echo "Extracting release notes for version: $VERSION" - python scripts/extract_release_notes.py $VERSION > current_release_notes.md - - - name: Create GitHub Release - uses: softprops/action-gh-release@v2 - with: - body_path: current_release_notes.md - draft: false - prerelease: ${{ contains(github.ref, 'alpha') || contains(github.ref, 'beta') || 
contains(github.ref, 'rc') }} - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - publish-testpypi: - name: Publish to TestPyPI - runs-on: ubuntu-24.04 - needs: [test, test-examples, create-release] # Run after tests and release creation - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') # Only on tag push - environment: - name: testpypi - url: https://test.pypi.org/p/flixopt - env: - SKIP_TESTPYPI_UPLOAD: "false" - - steps: - - name: Checkout repository - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install dependencies - run: | - uv pip install --system twine - - - name: Build the distribution - run: | - uv build - - - name: Upload to TestPyPI - run: | - twine upload --repository-url https://test.pypi.org/legacy/ dist/* --verbose --skip-existing - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }} - TWINE_NON_INTERACTIVE: "1" - - - name: Test install from TestPyPI - if: env.SKIP_TESTPYPI_UPLOAD != 'true' - run: | - set -Eeuo pipefail - # Create a temporary environment to test installation - uv venv test_env - source test_env/bin/activate - - # Get project name from pyproject.toml (PEP 621) - PACKAGE_NAME=$(python - <<'PY' - import sys, tomllib, pathlib - data = tomllib.loads(pathlib.Path("pyproject.toml").read_text(encoding="utf-8")) - print(data["project"]["name"]) - PY - ) - - # Extract version from git tag - VERSION=${GITHUB_REF#refs/tags/v} - - # Wait and retry while TestPyPI indexes the package - INSTALL_SUCCESS=false - for d in 10 20 40 80 120; do - sleep "$d" - echo "Attempting to install $PACKAGE_NAME==$VERSION from TestPyPI (retry after ${d}s)..." 
- - # Install specific version and verify it matches - if uv pip install --index-strategy unsafe-best-match --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ "$PACKAGE_NAME==$VERSION" && \ - python -c "from importlib.metadata import version; installed = version('$PACKAGE_NAME'); print(f'Installed: {installed}'); assert '$VERSION' == installed"; then - INSTALL_SUCCESS=true - break - fi - done - - # Check if installation succeeded - if [ "$INSTALL_SUCCESS" = "false" ]; then - echo "ERROR: Failed to install $PACKAGE_NAME==$VERSION from TestPyPI after all retries" - echo "This could indicate:" - echo " - TestPyPI indexing issues" - echo " - Package upload problems" - echo " - Version mismatch between tag and package" - exit 1 - fi - - # Final success confirmation - python -c "import flixopt; print('TestPyPI installation successful!')" - - publish-pypi: - name: Publish to PyPI - runs-on: ubuntu-24.04 - needs: [publish-testpypi] # Only run after TestPyPI publish succeeds - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') # Only on tag push - environment: - name: pypi - url: https://pypi.org/p/flixopt - - steps: - - name: Checkout repository - uses: actions/checkout@v5 - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install dependencies - run: | - uv pip install --system twine - - - name: Build the distribution - run: | - uv build - - - name: Upload to PyPI - run: | - twine upload dist/* --verbose --skip-existing - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - TWINE_NON_INTERACTIVE: "1" - - - name: Verify PyPI installation - run: | - set -Eeuo pipefail - # Create a temporary environment to test installation - uv venv prod_test_env - source prod_test_env/bin/activate - - # Get project name from 
pyproject.toml (PEP 621) - PACKAGE_NAME=$(python - <<'PY' - import sys, tomllib, pathlib - data = tomllib.loads(pathlib.Path("pyproject.toml").read_text(encoding="utf-8")) - print(data["project"]["name"]) - PY - ) - - # Extract version from git tag - VERSION=${GITHUB_REF#refs/tags/v} - - # Wait and retry while PyPI indexes the package - INSTALL_SUCCESS=false - for d in 10 20 40 60 90 120 180 300 480 600; do # Total: up to ~30 minutes - sleep "$d" - echo "Attempting to install $PACKAGE_NAME==$VERSION from PyPI (retry after ${d}s)..." - # Install directly from pypi, potentially mitigatiing caches - if uv pip install --index-url https://pypi.org/simple/ "$PACKAGE_NAME==$VERSION" && \ - python -c "from importlib.metadata import version; installed = version('$PACKAGE_NAME'); print(f'Installed: {installed}'); assert '$VERSION' == installed"; then - INSTALL_SUCCESS=true - break - fi - done - - if [ "$INSTALL_SUCCESS" = "false" ]; then - echo "ERROR: Failed to install $PACKAGE_NAME==$VERSION from PyPI after all retries" - echo "Check: https://pypi.org/project/$PACKAGE_NAME/$VERSION/" - exit 1 - fi - - # Final success confirmation - python -c "import flixopt; print('PyPI installation successful!')" - - deploy-docs: - name: Deploy Documentation - runs-on: ubuntu-24.04 - permissions: - contents: write - needs: [publish-pypi] # Deploy docs after successful PyPI publishing - if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, 'alpha') && !contains(github.ref, 'beta') && !contains(github.ref, 'rc') - - steps: - - name: Checkout repository - uses: actions/checkout@v5 - with: - fetch-depth: 0 # Fetch all history for proper versioning - - - name: Set up uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.9.8" - enable-cache: true - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Extract changelog to docs - run: | - # Install packaging dependency for changelog extraction - uv pip install 
--system packaging - - # Extract individual release files - python scripts/extract_changelog.py - - echo "✅ Extracted changelog to docs/changelog/" - - - name: Install documentation dependencies - run: | - uv pip install --system ".[docs]" - - - name: Configure Git Credentials - run: | - git config user.name github-actions[bot] - git config user.email 41898282+github-actions[bot]@users.noreply.github.com - - - name: Deploy docs - run: | - VERSION=${GITHUB_REF#refs/tags/v} - echo "Deploying docs after successful PyPI publish: $VERSION" - mike deploy --push --update-aliases $VERSION latest - mike set-default --push latest diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 000000000..8d0d0de1f --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,224 @@ +name: Release + +on: + push: + tags: + - v*.*.* + +env: + PYTHON_VERSION: "3.11" + PREPARATION_COMMIT: '[ci] prepare release ${{ github.ref_name }}' + +jobs: + check-preparation: + name: Check if release is prepared + runs-on: ubuntu-24.04 + outputs: + prepared: ${{ steps.validate.outputs.prepared }} + steps: + - uses: actions/checkout@v5 + + - name: Validate commit message + id: validate + run: | + COMMIT_MESSAGE=$(git log -1 --pretty=%B) + echo "Expected: '${{ env.PREPARATION_COMMIT }}'" + echo "Received: '$COMMIT_MESSAGE'" + + prepared="false" + if [[ "$COMMIT_MESSAGE" == "${{ env.PREPARATION_COMMIT }}" ]]; then + prepared="true" + fi + + echo "prepared=$prepared" >> $GITHUB_OUTPUT + + prepare-release: + name: Prepare release + needs: [check-preparation] + if: needs.check-preparation.outputs.prepared == 'false' + runs-on: ubuntu-24.04 + steps: + - name: Generate token for Release Bot + id: generate-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ vars.RELEASE_BOT_APP_ID }} + private-key: ${{ secrets.RELEASE_BOT_PRIVATE_KEY }} + + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + ref: main + token: ${{ 
steps.generate-token.outputs.token }} + + - name: Configure Git + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + - name: Update CITATION.cff + run: | + VERSION=${GITHUB_REF#refs/tags/v} + DATE=$(date +%Y-%m-%d) + sed -i "s/^version: .*/version: $VERSION/" CITATION.cff + sed -i "s/^date-released: .*/date-released: $DATE/" CITATION.cff + + - name: Remove previous tag + run: | + git tag -d ${{ github.ref_name }} + git push origin --delete ${{ github.ref_name }} + + - name: Commit and re-tag + run: | + git add CITATION.cff + git commit -m "${{ env.PREPARATION_COMMIT }}" + git push origin main + git tag -a ${{ github.ref_name }} -m "${{ github.ref_name }}" + git push origin ${{ github.ref_name }} + + test: + name: Run tests + needs: [check-preparation] + if: needs.check-preparation.outputs.prepared == 'true' + uses: ./.github/workflows/test.yaml + + build: + name: Build package + needs: [check-preparation, test] + if: needs.check-preparation.outputs.prepared == 'true' + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v5 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + enable-cache: true + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Build package + run: uv build + + - uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/ + retention-days: 7 + + publish-pypi: + name: Publish to PyPI + needs: [build] + runs-on: ubuntu-24.04 + environment: + name: pypi + url: https://pypi.org/p/flixopt + permissions: + id-token: write + steps: + - uses: actions/download-artifact@v4 + with: + name: dist + path: dist/ + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + skip-existing: true + + verify-pypi: + name: Verify PyPI installation + needs: [publish-pypi] + runs-on: ubuntu-24.04 + steps: + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + + - uses: 
actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Verify installation + run: | + VERSION=${GITHUB_REF#refs/tags/v} + + for delay in 10 20 40 60 90 120 180 300; do + sleep $delay + echo "Attempting installation (waited ${delay}s)..." + + if uv pip install --system --index-url https://pypi.org/simple/ "flixopt==$VERSION" && \ + python -c "from importlib.metadata import version; assert version('flixopt') == '$VERSION'"; then + echo "PyPI installation successful!" + exit 0 + fi + done + + echo "Failed to verify PyPI installation" + exit 1 + + create-release: + name: Create GitHub release + needs: [verify-pypi] + runs-on: ubuntu-24.04 + permissions: + contents: write + steps: + - uses: actions/checkout@v5 + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Extract release notes + run: | + VERSION=${GITHUB_REF#refs/tags/v} + python scripts/extract_release_notes.py $VERSION > current_release_notes.md + + - uses: softprops/action-gh-release@v2 + with: + body_path: current_release_notes.md + draft: false + prerelease: ${{ contains(github.ref, 'alpha') || contains(github.ref, 'beta') || contains(github.ref, 'rc') }} + generate_release_notes: true + + deploy-docs: + name: Deploy documentation + needs: [create-release] + if: "!contains(github.ref, 'alpha') && !contains(github.ref, 'beta') && !contains(github.ref, 'rc')" + runs-on: ubuntu-24.04 + permissions: + contents: write + steps: + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Extract changelog + run: | + uv pip install --system packaging + python scripts/extract_changelog.py + + - name: Install docs dependencies + run: uv pip install --system ".[docs]" + + - name: Configure Git + run: | + git config user.name github-actions[bot] + git config user.email 
41898282+github-actions[bot]@users.noreply.github.com + + - name: Deploy docs + run: | + VERSION=${GITHUB_REF#refs/tags/v} + mike deploy --push --update-aliases $VERSION latest + mike set-default --push latest diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 000000000..395fc766c --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,116 @@ +name: Tests + +on: + push: + branches: [main] + pull_request: + branches: ["*"] + workflow_dispatch: + workflow_call: # Allow release.yaml to call this workflow + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.11" + MPLBACKEND: Agg + PLOTLY_RENDERER: json + FLIXOPT_CI: false + +jobs: + lint: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v5 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + enable-cache: true + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Run Ruff + run: | + uvx ruff check . --output-format=github + uvx ruff format --check --diff . 
+ + test: + runs-on: ubuntu-24.04 + timeout-minutes: 30 + needs: lint + strategy: + fail-fast: false + matrix: + python-version: ['3.10', '3.11', '3.12', '3.13'] + steps: + - uses: actions/checkout@v5 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + enable-cache: true + + - uses: actions/setup-python@v6 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: uv pip install --system .[dev] + + - name: Run tests + run: pytest -v --numprocesses=auto + + test-examples: + runs-on: ubuntu-24.04 + timeout-minutes: 45 + needs: lint + # Only run on main branch or when called by release workflow (not on PRs) + if: github.event_name != 'pull_request' + steps: + - uses: actions/checkout@v5 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + enable-cache: true + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install dependencies + run: uv pip install --system .[dev] + + - name: Run example tests + run: pytest -v -m examples --numprocesses=auto + + security: + runs-on: ubuntu-24.04 + needs: lint + steps: + - uses: actions/checkout@v5 + + - uses: astral-sh/setup-uv@v6 + with: + version: "0.9.10" + enable-cache: true + + - uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Run Bandit + run: | + uvx bandit -r flixopt/ -c pyproject.toml -f json -o bandit-report.json -q --severity-level high --confidence-level medium + uvx bandit -r flixopt/ -c pyproject.toml -q --exit-zero + + - uses: actions/upload-artifact@v4 + if: always() + with: + name: security-report + path: bandit-report.json + retention-days: 30 diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 000000000..6a446f1c4 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,71 @@ +cff-version: 1.2.0 +message: "If you use this software, please cite it as below and consider citing the related publication." 
+type: software +title: "flixopt" +version: 4.3.5-beta +date-released: 2025-11-29 +url: "https://github.com/flixOpt/flixopt" +repository-code: "https://github.com/flixOpt/flixopt" +license: MIT +abstract: "FlixOpt (Flexible, Low-entry, Investment, X-sector OPTimization) is a comprehensive framework for modeling and optimizing energy and material flow systems in Python. It enables optimization of diverse applications including district heating networks, industrial production lines, renewable energy portfolios, and supply chain logistics. Built on modern scientific Python stack (linopy and xarray), it provides a progressive enhancement approach allowing users to start with simple models and incrementally add complexity such as multi-period investments, stochastic scenarios, and custom constraints. The framework simplifies the creation of global constraints and switching objectives through its 'effect' concept. The framework is designed for researchers and engineers in energy systems, industrial process optimization, and operations research." 
+keywords: + - optimization + - energy systems + - energy flow modeling + - linear programming + - mixed-integer programming + - MILP + - operations research + - python + - district heating + - renewable energy + - multi-period optimization + - investment optimization + - capacity planning + - energy modeling + - sector coupling + - energy transition + - industrial processes + - stochastic optimization + - linopy + - xarray +authors: + - family-names: Bumann + given-names: Felix + email: felixbumann387@gmail.com + affiliation: "SachsenEnergie AG" + orcid: "https://orcid.org/0009-0006-0765-4789" + - family-names: Panitz + given-names: Felix + email: baumbude@googlemail.com + affiliation: "Fraunhofer Research Institution for Energy Infrastructures and Geotechnologies IEG" + orcid: "https://orcid.org/0009-0007-7030-6987" + - family-names: Stange + given-names: Peter + email: peter.stange@tu-dresden.de + affiliation: "Chair of Building Energy Systems and Heat Supply, TU Dresden" + orcid: "https://orcid.org/0009-0001-6407-1495" +identifiers: + - type: doi + value: "10.18086/eurosun.2022.04.07" + description: "Software-supported Investment Optimization for District Heating Supply Systems" + - type: url + value: "https://flixopt.github.io/flixopt/latest/" + description: "Documentation" + - type: url + value: "https://pypi.org/project/flixopt/" + description: "PyPI package" +references: + - type: conference-paper + authors: + - family-names: Panitz + given-names: Felix + - family-names: Behrends + given-names: Tim + - family-names: Stange + given-names: Peter + title: "Software-supported Investment Optimization for District Heating Supply Systems" + year: 2022 + conference: + name: "EuroSun 2022" + doi: "10.18086/eurosun.2022.04.07" diff --git a/README.md b/README.md index 0a90dcb33..6d049819d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,12 @@ -# FlixOpt: Energy and Material Flow Optimization Framework +# FlixOpt: Progressive Flow System Optimization + +

+ Flexible  •  Low-entry  •  Investment  •  X-sector  •  OPTimization +

+ +

+ Model more than costs · Easy to prototype · Based on dispatch · Sector coupling · Mathematical optimization +

[![Documentation](https://img.shields.io/badge/docs-latest-brightgreen.svg)](https://flixopt.github.io/flixopt/latest/) [![Build Status](https://github.com/flixOpt/flixopt/actions/workflows/python-app.yaml/badge.svg)](https://github.com/flixOpt/flixopt/actions/workflows/python-app.yaml) @@ -9,18 +17,18 @@ [![PyPI downloads](https://img.shields.io/pypi/dm/flixopt)](https://pypi.org/project/flixopt/) [![GitHub last commit](https://img.shields.io/github/last-commit/flixOpt/flixopt)](https://github.com/flixOpt/flixopt/commits/main) [![GitHub issues](https://img.shields.io/github/issues/flixOpt/flixopt)](https://github.com/flixOpt/flixopt/issues) -[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/flixOpt/flixopt/main.svg)](https://results.pre-commit.ci/latest/github/flixOpt/flixopt/main) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Powered by linopy](https://img.shields.io/badge/powered%20by-linopy-blue)](https://github.com/PyPSA/linopy/) [![Powered by xarray](https://img.shields.io/badge/powered%20by-xarray-blue)](https://xarray.dev/) +[![DOI](https://zenodo.org/badge/540378857.svg)](https://doi.org/10.5281/zenodo.17448623) [![DOI](https://img.shields.io/badge/DOI-10.18086%2Feurosun.2022.04.07-blue)](https://doi.org/10.18086/eurosun.2022.04.07) [![GitHub stars](https://img.shields.io/github/stars/flixOpt/flixopt?style=social)](https://github.com/flixOpt/flixopt/stargazers) --- -**FlixOpt is a Python framework for optimizing energy and material flow systems** - from district heating networks to industrial production lines, from renewable energy portfolios to supply chain logistics. +**FlixOpt is a Python framework for progressive flow system optimization** - from district heating networks to industrial production lines, from renewable energy portfolios to supply chain logistics. 
-**Start simple, scale complex:** Build a working optimization model in minutes, then progressively add detail - multi-period investments, stochastic scenarios, custom constraints - without rewriting your code. +Build simple models quickly, then incrementally add investment decisions, multi-period planning, stochastic scenarios, and custom constraints without refactoring. --- @@ -42,11 +50,11 @@ flow_system = fx.FlowSystem(timesteps) flow_system.add_elements(buses, components, effects) # 2. Create and solve -calculation = fx.FullCalculation("MyModel", flow_system) -calculation.solve() +optimization = fx.Optimization("MyModel", flow_system) +optimization.solve(fx.solvers.HighsSolver()) # 3. Analyze results -calculation.results.solution +optimization.results.solution ``` **Get started with real examples:** @@ -90,8 +98,8 @@ boiler = fx.Boiler("Boiler", eta=0.9, ...) **Multi-criteria optimization:** Model costs, emissions, resource use - any custom metric. Optimize single objectives or use weighted combinations and ε-constraints. → [Effects documentation](https://flixopt.github.io/flixopt/latest/user-guide/mathematical-notation/effects-penalty-objective/) -**Performance at any scale:** Choose calculation modes without changing your model - Full, Segmented, or Aggregated (using [TSAM](https://github.com/FZJ-IEK3-VSA/tsam)). -→ [Calculation modes](https://flixopt.github.io/flixopt/latest/api-reference/calculation/) +**Performance at any scale:** Choose optimization modes without changing your model - Optimization, SegmentedOptimization, or ClusteredOptimization (using [TSAM](https://github.com/FZJ-IEK3-VSA/tsam)). +→ [Optimization modes](https://flixopt.github.io/flixopt/latest/api-reference/optimization/) **Built for reproducibility:** Self-contained NetCDF result files with complete model information. Load results months later - everything is preserved. 
→ [Results documentation](https://flixopt.github.io/flixopt/latest/api-reference/results/) @@ -185,6 +193,9 @@ If FlixOpt supports your research or project, please cite: - **Main Citation:** [DOI:10.18086/eurosun.2022.04.07](https://doi.org/10.18086/eurosun.2022.04.07) - **Short Overview:** [DOI:10.13140/RG.2.2.14948.24969](https://doi.org/10.13140/RG.2.2.14948.24969) +To pinpoint which version you used in your work, please reference one of these DOIs: +- [![DOI](https://zenodo.org/badge/540378857.svg)](https://doi.org/10.5281/zenodo.17448623) + --- ## 📄 License diff --git a/docs/examples/03-Calculation Modes.md b/docs/examples/03-Optimization Modes.md similarity index 56% rename from docs/examples/03-Calculation Modes.md rename to docs/examples/03-Optimization Modes.md index dd0321d43..880366906 100644 --- a/docs/examples/03-Calculation Modes.md +++ b/docs/examples/03-Optimization Modes.md @@ -1,5 +1,5 @@ -# Calculation Mode comparison +# Optimization Modes Comparison **Note:** This example relies on time series data. You can find it in the `examples` folder of the FlixOpt repository. ```python -{! ../examples/03_Calculation_types/example_calculation_types.py !} +{! ../examples/03_Optimization_modes/example_optimization_modes.py !} ``` diff --git a/docs/examples/index.md b/docs/examples/index.md index 16a15d20e..b5534b8e3 100644 --- a/docs/examples/index.md +++ b/docs/examples/index.md @@ -9,6 +9,6 @@ We work on improving this gallery. If you have something to share, please contac 1. [Minimal Example](00-Minimal Example.md) - The simplest possible FlixOpt model 2. [Simple Example](01-Basic Example.md) - A basic example with more features 3. [Complex Example](02-Complex Example.md) - A comprehensive example with result saving and loading -4. [Calculation Modes](03-Calculation Modes.md) - Comparison of different calculation modes +4. [Optimization Modes](03-Optimization Modes.md) - Comparison of different optimization modes 5. 
[Scenarios](04-Scenarios.md) - Working with scenarios in FlixOpt 6. [Two-stage Optimization](05-Two-stage-optimization.md) - Two-stage optimization approach diff --git a/docs/getting-started.md b/docs/getting-started.md index cd558ce79..0cdd2a5a7 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -53,7 +53,7 @@ Working with FlixOpt follows a general pattern: 2. **Define [`Effects`][flixopt.effects.Effect]** (costs, emissions, etc.) 3. **Define [`Buses`][flixopt.elements.Bus]** as connection points in your system 4. **Add [`Components`][flixopt.components]** like converters, storage, sources/sinks with their Flows -5. **Run [`Calculations`][flixopt.calculation]** to optimize your system +5. **Run [`Optimizations`][flixopt.optimization]** to optimize your system 6. **Analyze [`Results`][flixopt.results]** using built-in or external visualization tools ## Next Steps diff --git a/docs/index.md b/docs/index.md index c9b01f284..3467bb394 100644 --- a/docs/index.md +++ b/docs/index.md @@ -9,7 +9,9 @@ hide:

flixOpt

-

Energy and Material Flow Optimization Framework

+

Flexible · Low-entry · Investment · X-sector · OPTimization

+ +

Model more than costs · Easy to prototype · Based on dispatch · Sector coupling · Mathematical optimization

Model, optimize, and analyze complex energy systems with a powerful Python framework designed for flexibility and performance.

diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css index f1b7c03d5..2992267b6 100644 --- a/docs/stylesheets/extra.css +++ b/docs/stylesheets/extra.css @@ -1,5 +1,5 @@ /* ============================================================================ - flixOpt Custom Styling with Custom Palette + FlixOpt Custom Styling with Custom Palette ========================================================================= */ /* Root variables for easy customization */ @@ -526,10 +526,30 @@ .hero-section .tagline { font-size: 1.5rem; color: var(--md-default-fg-color--light); - margin-bottom: 2rem; + margin-bottom: 1rem; font-weight: 300; } +/* Backronym styling */ +.hero-section .backronym { + font-size: 1.3rem; + font-weight: 500; + letter-spacing: 0.05em; + margin-bottom: 0.5rem; +} + +.hero-section .backronym .letter { + color: var(--md-primary-fg-color); + font-weight: 700; +} + +.hero-section .backronym-desc { + font-size: 0.95rem; + color: var(--md-default-fg-color--light); + margin-bottom: 2rem; + opacity: 0.85; +} + .hero-buttons { display: flex; gap: 1rem; diff --git a/docs/user-guide/core-concepts.md b/docs/user-guide/core-concepts.md index bf52a26ba..f165f1e4e 100644 --- a/docs/user-guide/core-concepts.md +++ b/docs/user-guide/core-concepts.md @@ -98,23 +98,23 @@ This approach allows for multi-criteria optimization using both: - **Weighted Sum Method**: Optimize a theoretical Effect which other Effects crosslink to - **ε-constraint method**: Constrain effects to specific limits -### Calculation +### Optimization -A [`FlowSystem`][flixopt.flow_system.FlowSystem] can be converted to a Model and optimized by creating a [`Calculation`][flixopt.calculation.Calculation] from it. +A [`FlowSystem`][flixopt.flow_system.FlowSystem] can be converted to a Model and optimized by creating an [`Optimization`][flixopt.optimization.Optimization] from it. 
-FlixOpt offers different calculation modes: +FlixOpt offers different optimization modes: -- [`FullCalculation`][flixopt.calculation.FullCalculation] - Solves the entire problem at once -- [`SegmentedCalculation`][flixopt.calculation.SegmentedCalculation] - Solves the problem in segments (with optioinal overlap), improving performance for large problems -- [`AggregatedCalculation`][flixopt.calculation.AggregatedCalculation] - Uses typical periods to reduce computational requirements +- [`Optimization`][flixopt.optimization.Optimization] - Solves the entire problem at once +- [`SegmentedOptimization`][flixopt.optimization.SegmentedOptimization] - Solves the problem in segments (with optional overlap), improving performance for large problems +- [`ClusteredOptimization`][flixopt.optimization.ClusteredOptimization] - Uses typical periods to reduce computational requirements ### Results -The results of a calculation are stored in a [`CalculationResults`][flixopt.results.CalculationResults] object. -This object contains the solutions of the optimization as well as all information about the [`Calculation`][flixopt.calculation.Calculation] and the [`FlowSystem`][flixopt.flow_system.FlowSystem] it was created from. -The solution is stored as an `xarray.Dataset`, but can be accessed through their assotiated Component, Bus or Effect. +The results of an optimization are stored in a [`Results`][flixopt.results.Results] object. +This object contains the solutions of the optimization as well as all information about the [`Optimization`][flixopt.optimization.Optimization] and the [`FlowSystem`][flixopt.flow_system.FlowSystem] it was created from. +The solution is stored as an `xarray.Dataset`, but can be accessed through their associated Component, Bus or Effect. -This [`CalculationResults`][flixopt.results.CalculationResults] object can be saved to file and reloaded from file, allowing you to analyze the results anytime after the solve. 
+This [`Results`][flixopt.results.Results] object can be saved to file and reloaded from file, allowing you to analyze the results anytime after the solve. ## How These Concepts Work Together @@ -128,12 +128,12 @@ The process of working with FlixOpt can be divided into 3 steps: - Add - [`FlowSystems`][flixopt.flow_system.FlowSystem] can also be loaded from a netCDF file* 2. Translate the model to a mathematical optimization problem - - Create a [`Calculation`][flixopt.calculation.Calculation] from your FlowSystem and choose a Solver - - ...The Calculation is translated internally to a mathematical optimization problem... + - Create an [`Optimization`][flixopt.optimization.Optimization] from your FlowSystem and choose a Solver + - ...The Optimization is translated internally to a mathematical optimization problem... - ...and solved by the chosen solver. 3. Analyze the results - - The results are stored in a [`CalculationResults`][flixopt.results.CalculationResults] object - - This object can be saved to file and reloaded from file, retaining all information about the calculation + - The results are stored in a [`Results`][flixopt.results.Results] object + - This object can be saved to file and reloaded from file, retaining all information about the optimization - As it contains the used [`FlowSystem`][flixopt.flow_system.FlowSystem], it fully documents all assumptions taken to create the results.
@@ -152,4 +152,4 @@ This allows to adjust your model to very specific requirements without loosing t - + diff --git a/docs/user-guide/mathematical-notation/dimensions.md b/docs/user-guide/mathematical-notation/dimensions.md index fc16ad0d5..e10ef5ffd 100644 --- a/docs/user-guide/mathematical-notation/dimensions.md +++ b/docs/user-guide/mathematical-notation/dimensions.md @@ -114,6 +114,7 @@ Where: - $\mathcal{S}$ is the set of scenarios - $w_s$ is the weight for scenario $s$ - The optimizer balances performance across scenarios according to their weights +- **Both the objective effect and Penalty effect are weighted by $w_s$** (see [Penalty weighting](effects-penalty-objective.md#penalty)) ### Period Independence @@ -130,6 +131,8 @@ $$ \min \quad \sum_{y \in \mathcal{Y}} w_y \cdot \text{Objective}_y $$ +Where **both the objective effect and Penalty effect are weighted by $w_y$** (see [Penalty weighting](effects-penalty-objective.md#penalty)) + ### Shared Periodic Decisions: The Exception **Investment decisions (sizes) can be shared across all scenarios:** @@ -203,16 +206,18 @@ $$ Where: - $\mathcal{T}$ is the set of time steps -- $\mathcal{E}$ is the set of effects +- $\mathcal{E}$ is the set of effects (including the Penalty effect $E_\Phi$) - $\mathcal{S}$ is the set of scenarios - $\mathcal{Y}$ is the set of periods - $s_{e}(\cdots)$ are the effect contributions (costs, emissions, etc.) 
- $w_s, w_y, w_{y,s}$ are the dimension weights +- **Penalty effect is weighted identically to other effects** **See [Effects, Penalty & Objective](effects-penalty-objective.md) for complete formulations including:** - How temporal and periodic effects expand with dimensions - Detailed objective function for each dimensional case - Periodic (investment) vs temporal (operational) effect handling +- Explicit Penalty weighting formulations --- @@ -288,7 +293,7 @@ flow_system = fx.FlowSystem( # [6.0, 4.0]] # 2040: 10 × [0.6, 0.4] ``` -**Normalization:** Set `normalize_weights=False` in `Calculation` to turn of the normalization. +**Normalization:** Set `normalize_weights=False` in `Optimization` to turn off the normalization. --- diff --git a/docs/user-guide/mathematical-notation/effects-penalty-objective.md b/docs/user-guide/mathematical-notation/effects-penalty-objective.md index 0759ef5ee..1c96f3613 100644 --- a/docs/user-guide/mathematical-notation/effects-penalty-objective.md +++ b/docs/user-guide/mathematical-notation/effects-penalty-objective.md @@ -142,40 +142,86 @@ $$ ## Penalty -In addition to user-defined [Effects](#effects), every FlixOpt model includes a **Penalty** term $\Phi$ to: +Every FlixOpt model includes a special **Penalty Effect** $E_\Phi$ to: + - Prevent infeasible problems -- Simplify troubleshooting by allowing constraint violations with high cost +- Allow introducing a bias without influencing effects, simplifying results analysis + +**Key Feature:** Penalty is implemented as a standard Effect (labeled `Penalty`), so you can **add penalty contributions anywhere effects are used**: + +```python +import flixopt as fx + +# Add penalty contributions just like any other effect +on_off = fx.OnOffParameters( + effects_per_switch_on={'Penalty': 1} # Add bias against switching on this component, without adding costs +) +``` + +**Optionally Define Custom Penalty:** +Users can define their own Penalty effect with custom properties (unit, constraints, 
etc.): + +```python +# Define custom penalty effect (must use fx.PENALTY_EFFECT_LABEL) +custom_penalty = fx.Effect( + fx.PENALTY_EFFECT_LABEL, # Always use this constant: 'Penalty' + unit='€', + description='Penalty costs for constraint violations', + maximum_total=1e6, # Limit total penalty for debugging +) +flow_system.add_elements(custom_penalty) +``` + +If not user-defined, the Penalty effect is automatically created during modeling with default settings. + +**Periodic penalty shares** (time-independent): +$$ \label{eq:Penalty_periodic} +E_{\Phi, \text{per}} = \sum_{l \in \mathcal{L}} s_{l \rightarrow \Phi,\text{per}} +$$ -Penalty shares originate from elements, similar to effect shares: +**Temporal penalty shares** (time-dependent): +$$ \label{eq:Penalty_temporal} +E_{\Phi, \text{temp}}(\text{t}_{i}) = \sum_{l \in \mathcal{L}} s_{l \rightarrow \Phi, \text{temp}}(\text{t}_i) +$$ -$$ \label{eq:Penalty} -\Phi = \sum_{l \in \mathcal{L}} \left( s_{l \rightarrow \Phi} +\sum_{\text{t}_i \in \mathcal{T}} s_{l \rightarrow \Phi}(\text{t}_{i}) \right) +**Total penalty** (combining both domains): +$$ \label{eq:Penalty_total} +E_{\Phi} = E_{\Phi,\text{per}} + \sum_{\text{t}_i \in \mathcal{T}} E_{\Phi, \text{temp}}(\text{t}_{i}) $$ Where: - $\mathcal{L}$ is the set of all elements - $\mathcal{T}$ is the set of all timesteps -- $s_{l \rightarrow \Phi}$ is the penalty share from element $l$ +- $s_{l \rightarrow \Phi, \text{per}}$ is the periodic penalty share from element $l$ +- $s_{l \rightarrow \Phi, \text{temp}}(\text{t}_i)$ is the temporal penalty share from element $l$ at timestep $\text{t}_i$ + +**Primary usage:** Penalties occur in [Buses](elements/Bus.md) via the `excess_penalty_per_flow_hour` parameter, which allows nodal imbalances at a high cost, and in time series aggregation to allow period flexibility. 
-**Current usage:** Penalties primarily occur in [Buses](elements/Bus.md) via the `excess_penalty_per_flow_hour` parameter, which allows nodal imbalances at a high cost. +**Key properties:** +- Penalty shares are added via `add_share_to_effects(name, expressions={fx.PENALTY_EFFECT_LABEL: ...}, target='temporal'/'periodic')` +- Like other effects, penalty can be constrained (e.g., `maximum_total` for debugging) +- Results include breakdown: temporal, periodic, and total penalty contributions +- Penalty is always added to the objective function (cannot be disabled) +- Access via `flow_system.effects.penalty_effect` or `flow_system.effects[fx.PENALTY_EFFECT_LABEL]` +- **Scenario weighting**: Penalty is weighted identically to the objective effect—see [Time + Scenario](#time--scenario) for details --- ## Objective Function -The optimization objective minimizes the chosen effect plus any penalties: +The optimization objective minimizes the chosen effect plus the penalty effect: $$ \label{eq:Objective} -\min \left( E_{\Omega} + \Phi \right) +\min \left( E_{\Omega} + E_{\Phi} \right) $$ Where: - $E_{\Omega}$ is the chosen **objective effect** (see $\eqref{eq:Effect_Total}$) -- $\Phi$ is the [penalty](#penalty) term +- $E_{\Phi}$ is the [penalty effect](#penalty) (see $\eqref{eq:Penalty_total}$) -One effect must be designated as the objective via `is_objective=True`. +One effect must be designated as the objective via `is_objective=True`. The penalty effect is automatically created and always added to the objective. 
### Multi-Criteria Optimization @@ -198,54 +244,54 @@ When the FlowSystem includes **periods** and/or **scenarios** (see [Dimensions]( ### Time Only (Base Case) $$ -\min \quad E_{\Omega} + \Phi = \sum_{\text{t}_i \in \mathcal{T}} E_{\Omega,\text{temp}}(\text{t}_i) + E_{\Omega,\text{per}} + \Phi +\min \quad E_{\Omega} + E_{\Phi} = \sum_{\text{t}_i \in \mathcal{T}} E_{\Omega,\text{temp}}(\text{t}_i) + E_{\Omega,\text{per}} + E_{\Phi,\text{per}} + \sum_{\text{t}_i \in \mathcal{T}} E_{\Phi,\text{temp}}(\text{t}_i) $$ Where: -- Temporal effects sum over time: $\sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i)$ -- Periodic effects are constant: $E_{\Omega,\text{per}}$ -- Penalty sums over time: $\Phi = \sum_{\text{t}_i} \Phi(\text{t}_i)$ +- Temporal effects sum over time: $\sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i)$ and $\sum_{\text{t}_i} E_{\Phi,\text{temp}}(\text{t}_i)$ +- Periodic effects are constant: $E_{\Omega,\text{per}}$ and $E_{\Phi,\text{per}}$ --- ### Time + Scenario $$ -\min \quad \sum_{s \in \mathcal{S}} w_s \cdot \left( E_{\Omega}(s) + \Phi(s) \right) +\min \quad \sum_{s \in \mathcal{S}} w_s \cdot \left( E_{\Omega}(s) + E_{\Phi}(s) \right) $$ Where: - $\mathcal{S}$ is the set of scenarios - $w_s$ is the weight for scenario $s$ (typically scenario probability) -- Periodic effects are **shared across scenarios**: $E_{\Omega,\text{per}}$ (same for all $s$) -- Temporal effects are **scenario-specific**: $E_{\Omega,\text{temp}}(s) = \sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i, s)$ -- Penalties are **scenario-specific**: $\Phi(s) = \sum_{\text{t}_i} \Phi(\text{t}_i, s)$ +- Periodic effects are **shared across scenarios**: $E_{\Omega,\text{per}}$ and $E_{\Phi,\text{per}}$ (same for all $s$) +- Temporal effects are **scenario-specific**: $E_{\Omega,\text{temp}}(s) = \sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i, s)$ and $E_{\Phi,\text{temp}}(s) = \sum_{\text{t}_i} E_{\Phi,\text{temp}}(\text{t}_i, s)$ **Interpretation:** - Investment 
decisions (periodic) made once, used across all scenarios - Operations (temporal) differ by scenario - Objective balances expected value across scenarios +- **Both $E_{\Omega}$ (objective effect) and $E_{\Phi}$ (penalty) are weighted identically by $w_s$** --- ### Time + Period $$ -\min \quad \sum_{y \in \mathcal{Y}} w_y \cdot \left( E_{\Omega}(y) + \Phi(y) \right) +\min \quad \sum_{y \in \mathcal{Y}} w_y \cdot \left( E_{\Omega}(y) + E_{\Phi}(y) \right) $$ Where: - $\mathcal{Y}$ is the set of periods (e.g., years) - $w_y$ is the weight for period $y$ (typically annual discount factor) -- Each period $y$ has **independent** periodic and temporal effects +- Each period $y$ has **independent** periodic and temporal effects (including penalty) - Each period $y$ has **independent** investment and operational decisions +- **Both $E_{\Omega}$ (objective effect) and $E_{\Phi}$ (penalty) are weighted identically by $w_y$** --- ### Time + Period + Scenario (Full Multi-Dimensional) $$ -\min \quad \sum_{y \in \mathcal{Y}} \left[ w_y \cdot E_{\Omega,\text{per}}(y) + \sum_{s \in \mathcal{S}} w_{y,s} \cdot \left( E_{\Omega,\text{temp}}(y,s) + \Phi(y,s) \right) \right] +\min \quad \sum_{y \in \mathcal{Y}} \left[ w_y \cdot \left( E_{\Omega,\text{per}}(y) + E_{\Phi,\text{per}}(y) \right) + \sum_{s \in \mathcal{S}} w_{y,s} \cdot \left( E_{\Omega,\text{temp}}(y,s) + E_{\Phi,\text{temp}}(y,s) \right) \right] $$ Where: @@ -253,15 +299,15 @@ Where: - $\mathcal{Y}$ is the set of periods - $w_y$ is the period weight (for periodic effects) - $w_{y,s}$ is the combined period-scenario weight (for temporal effects) -- **Periodic effects** $E_{\Omega,\text{per}}(y)$ are period-specific but **scenario-independent** -- **Temporal effects** $E_{\Omega,\text{temp}}(y,s) = \sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i, y, s)$ are **fully indexed** -- **Penalties** $\Phi(y,s)$ are **fully indexed** +- **Periodic effects** $E_{\Omega,\text{per}}(y)$ and $E_{\Phi,\text{per}}(y)$ are 
period-specific but **scenario-independent** +- **Temporal effects** $E_{\Omega,\text{temp}}(y,s) = \sum_{\text{t}_i} E_{\Omega,\text{temp}}(\text{t}_i, y, s)$ and $E_{\Phi,\text{temp}}(y,s) = \sum_{\text{t}_i} E_{\Phi,\text{temp}}(\text{t}_i, y, s)$ are **fully indexed** **Key Principle:** - Scenarios and periods are **operationally independent** (no energy/resource exchange) - Coupled **only through the weighted objective function** - **Periodic effects within a period are shared across all scenarios** (investment made once per period) - **Temporal effects are independent per scenario** (different operations under different conditions) +- **Both $E_{\Omega}$ (objective effect) and $E_{\Phi}$ (penalty) use identical weighting** ($w_y$ for periodic, $w_{y,s}$ for temporal) --- @@ -274,7 +320,8 @@ Where: | **Total temporal effect** | $E_{e,\text{temp},\text{tot}} = \sum_{\text{t}_i} E_{e,\text{temp}}(\text{t}_i)$ | Sum over time | Depends on dimensions | | **Total periodic effect** | $E_{e,\text{per}}$ | Constant | $(y)$ when periods present | | **Total effect** | $E_e = E_{e,\text{per}} + E_{e,\text{temp},\text{tot}}$ | Combined | Depends on dimensions | -| **Objective** | $\min(E_{\Omega} + \Phi)$ | With weights when multi-dimensional | See formulations above | +| **Penalty effect** | $E_\Phi = E_{\Phi,\text{per}} + E_{\Phi,\text{temp},\text{tot}}$ | Combined (same as effects) | **Weighted identically to objective effect** | +| **Objective** | $\min(E_{\Omega} + E_{\Phi})$ | With weights when multi-dimensional | See formulations above | --- diff --git a/docs/user-guide/migration-guide-v3.md b/docs/user-guide/migration-guide-v3.md index 4c7959e8f..cb6fbc55e 100644 --- a/docs/user-guide/migration-guide-v3.md +++ b/docs/user-guide/migration-guide-v3.md @@ -76,12 +76,12 @@ Terminology changed and sharing system inverted: effects now "pull" shares. 
--- -### FlowSystem & Calculation +### FlowSystem & Optimization | Change | Description | |--------|-------------| -| **FlowSystem copying** | Each `Calculation` gets its own copy (independent) | -| **do_modeling() return** | Returns `Calculation` object (access model via `.model` property) | +| **FlowSystem copying** | Each `Optimization` gets its own copy (independent) | +| **do_modeling() return** | Returns `Optimization` object (access model via `.model` property) | | **Storage arrays** | Arrays match timestep count (no extra element) | | **Final charge state** | Use `relative_minimum_final_charge_state` / `relative_maximum_final_charge_state` | @@ -135,7 +135,7 @@ Terminology changed and sharing system inverted: effects now "pull" shares. | `agg_group` | `aggregation_group` | | `agg_weight` | `aggregation_weight` | -??? abstract "Calculation" +??? abstract "Optimization" | Old (v2.x) | New (v3.0.0) | |------------|--------------| @@ -207,7 +207,7 @@ Terminology changed and sharing system inverted: effects now "pull" shares. | Issue | Solution | |-------|----------| | Effect shares not working | See [Effect System Redesign](#effect-system-redesign) | -| Storage dimensions wrong | See [FlowSystem & Calculation](#flowsystem-calculation) | +| Storage dimensions wrong | See [FlowSystem & Optimization](#flowsystem-optimization) | | Bus assignment error | See [String Labels](#string-labels) | | KeyError in results | See [Variable Names](#variable-names) | | `AttributeError: model` | Rename `.model` → `.submodel` | @@ -220,7 +220,7 @@ Terminology changed and sharing system inverted: effects now "pull" shares. | Category | Tasks | |----------|-------| | **Install** | • `pip install --upgrade flixopt` | -| **Breaking changes** | • Update [effect sharing](#effect-system-redesign)
• Update [variable names](#variable-names)
• Update [string labels](#string-labels)
• Fix [storage arrays](#flowsystem-calculation)
• Update [Calculation API](#flowsystem-calculation)
• Update [class names](#other-changes) | +| **Breaking changes** | • Update [effect sharing](#effect-system-redesign)
• Update [variable names](#variable-names)
• Update [string labels](#string-labels)
• Fix [storage arrays](#flowsystem-optimization)
• Update [Optimization API](#flowsystem-optimization)
• Update [class names](#other-changes) | | **Configuration** | • Enable [logging](#other-changes) if needed | | **Deprecated** | • Update [deprecated parameters](#deprecated-parameters) (recommended) | | **Testing** | • Test thoroughly
• Validate results match v2.x | diff --git a/examples/00_Minmal/minimal_example.py b/examples/00_Minmal/minimal_example.py index 9756396b3..7a94b2222 100644 --- a/examples/00_Minmal/minimal_example.py +++ b/examples/00_Minmal/minimal_example.py @@ -32,5 +32,5 @@ ), ) - calculation = fx.FullCalculation('Simulation1', flow_system).do_modeling().solve(fx.solvers.HighsSolver(0.01, 60)) - calculation.results['Heat'].plot_node_balance() + optimization = fx.Optimization('Simulation1', flow_system).solve(fx.solvers.HighsSolver(0.01, 60)) + optimization.results['Heat'].plot_node_balance() diff --git a/examples/01_Simple/simple_example.py b/examples/01_Simple/simple_example.py index d9737cf7b..c2d6d88e1 100644 --- a/examples/01_Simple/simple_example.py +++ b/examples/01_Simple/simple_example.py @@ -104,24 +104,24 @@ # --- Define and Run Calculation --- # Create a calculation object to model the Flow System - calculation = fx.FullCalculation(name='Sim1', flow_system=flow_system) - calculation.do_modeling() # Translate the model to a solvable form, creating equations and Variables + optimization = fx.Optimization(name='Sim1', flow_system=flow_system) + optimization.do_modeling() # Translate the model to a solvable form, creating equations and Variables # --- Solve the Calculation and Save Results --- - calculation.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30)) + optimization.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30)) # --- Analyze Results --- # Colors are automatically assigned using default colormap # Optional: Configure custom colors with - calculation.results.setup_colors() - calculation.results['Fernwärme'].plot_node_balance_pie() - calculation.results['Fernwärme'].plot_node_balance() - calculation.results['Storage'].plot_charge_state() - calculation.results.plot_heatmap('CHP(Q_th)|flow_rate') + optimization.results.setup_colors() + optimization.results['Fernwärme'].plot_node_balance_pie() + 
optimization.results['Fernwärme'].plot_node_balance() + optimization.results['Storage'].plot_charge_state() + optimization.results.plot_heatmap('CHP(Q_th)|flow_rate') # Convert the results for the storage component to a dataframe and display - df = calculation.results['Storage'].node_balance_with_charge_state() + df = optimization.results['Storage'].node_balance_with_charge_state() print(df) # Save results to file for later usage - calculation.results.to_file() + optimization.results.to_file() diff --git a/examples/02_Complex/complex_example.py b/examples/02_Complex/complex_example.py index cad938cb2..2913f643f 100644 --- a/examples/02_Complex/complex_example.py +++ b/examples/02_Complex/complex_example.py @@ -15,7 +15,7 @@ check_penalty = False excess_penalty = 1e5 use_chp_with_piecewise_conversion = True - time_indices = None # Define specific time steps for custom calculations, or use the entire series + time_indices = None # Define specific time steps for custom optimizations, or use the entire series # --- Define Demand and Price Profiles --- # Input data for electricity and heat demands, as well as electricity price @@ -194,17 +194,17 @@ print(f'Network app requires extra dependencies: {e}') # --- Solve FlowSystem --- - calculation = fx.FullCalculation('complex example', flow_system, time_indices) - calculation.do_modeling() + optimization = fx.Optimization('complex example', flow_system, time_indices) + optimization.do_modeling() - calculation.solve(fx.solvers.HighsSolver(0.01, 60)) + optimization.solve(fx.solvers.HighsSolver(0.01, 60)) # --- Results --- # You can analyze results directly or save them to file and reload them later. 
- calculation.results.to_file() + optimization.results.to_file() # But let's plot some results anyway - calculation.results.plot_heatmap('BHKW2(Q_th)|flow_rate') - calculation.results['BHKW2'].plot_node_balance() - calculation.results['Speicher'].plot_charge_state() - calculation.results['Fernwärme'].plot_node_balance_pie() + optimization.results.plot_heatmap('BHKW2(Q_th)|flow_rate') + optimization.results['BHKW2'].plot_node_balance() + optimization.results['Speicher'].plot_charge_state() + optimization.results['Fernwärme'].plot_node_balance_pie() diff --git a/examples/02_Complex/complex_example_results.py b/examples/02_Complex/complex_example_results.py index 96191c4d8..7f1123a26 100644 --- a/examples/02_Complex/complex_example_results.py +++ b/examples/02_Complex/complex_example_results.py @@ -9,7 +9,7 @@ # --- Load Results --- try: - results = fx.results.CalculationResults.from_file('results', 'complex example') + results = fx.results.Results.from_file('results', 'complex example') except FileNotFoundError as e: raise FileNotFoundError( f"Results file not found in the specified directory ('results'). 
" diff --git a/examples/03_Calculation_types/example_calculation_types.py b/examples/03_Optimization_modes/example_optimization_modes.py similarity index 74% rename from examples/03_Calculation_types/example_calculation_types.py rename to examples/03_Optimization_modes/example_optimization_modes.py index fa57e6f9a..d3ae566e4 100644 --- a/examples/03_Calculation_types/example_calculation_types.py +++ b/examples/03_Optimization_modes/example_optimization_modes.py @@ -10,6 +10,18 @@ import flixopt as fx + +# Get solutions for plotting for different optimizations +def get_solutions(optimizations: list, variable: str) -> xr.Dataset: + dataarrays = [] + for optimization in optimizations: + if optimization.name == 'Segmented': + dataarrays.append(optimization.results.solution_without_overlap(variable).rename(optimization.name)) + else: + dataarrays.append(optimization.results.solution[variable].rename(optimization.name)) + return xr.merge(dataarrays, join='outer') + + if __name__ == '__main__': fx.CONFIG.exploring() @@ -20,7 +32,7 @@ segment_length, overlap_length = 96, 1 # Aggregated Properties - aggregation_parameters = fx.AggregationParameters( + clustering_parameters = fx.ClusteringParameters( hours_per_period=6, nr_of_periods=4, fix_storage_flows=False, @@ -49,9 +61,9 @@ # TimeSeriesData objects TS_heat_demand = fx.TimeSeriesData(heat_demand) - TS_electricity_demand = fx.TimeSeriesData(electricity_demand, aggregation_weight=0.7) - TS_electricity_price_sell = fx.TimeSeriesData(-(electricity_price - 0.5), aggregation_group='p_el') - TS_electricity_price_buy = fx.TimeSeriesData(electricity_price + 0.5, aggregation_group='p_el') + TS_electricity_demand = fx.TimeSeriesData(electricity_demand, clustering_weight=0.7) + TS_electricity_price_sell = fx.TimeSeriesData(-(electricity_price - 0.5), clustering_group='p_el') + TS_electricity_price_buy = fx.TimeSeriesData(electricity_price + 0.5, clustering_group='p_el') flow_system = fx.FlowSystem(timesteps) 
flow_system.add_elements( @@ -166,42 +178,32 @@ ) flow_system.plot_network() - # Calculations - calculations: list[fx.FullCalculation | fx.AggregatedCalculation | fx.SegmentedCalculation] = [] + # Optimizations + optimizations: list[fx.Optimization | fx.ClusteredOptimization | fx.SegmentedOptimization] = [] if full: - calculation = fx.FullCalculation('Full', flow_system) - calculation.do_modeling() - calculation.solve(fx.solvers.HighsSolver(0.01 / 100, 60)) - calculations.append(calculation) + optimization = fx.Optimization('Full', flow_system.copy()) + optimization.do_modeling() + optimization.solve(fx.solvers.HighsSolver(0.01 / 100, 60)) + optimizations.append(optimization) if segmented: - calculation = fx.SegmentedCalculation('Segmented', flow_system, segment_length, overlap_length) - calculation.do_modeling_and_solve(fx.solvers.HighsSolver(0.01 / 100, 60)) - calculations.append(calculation) + optimization = fx.SegmentedOptimization('Segmented', flow_system.copy(), segment_length, overlap_length) + optimization.do_modeling_and_solve(fx.solvers.HighsSolver(0.01 / 100, 60)) + optimizations.append(optimization) if aggregated: if keep_extreme_periods: - aggregation_parameters.time_series_for_high_peaks = [TS_heat_demand] - aggregation_parameters.time_series_for_low_peaks = [TS_electricity_demand, TS_heat_demand] - calculation = fx.AggregatedCalculation('Aggregated', flow_system, aggregation_parameters) - calculation.do_modeling() - calculation.solve(fx.solvers.HighsSolver(0.01 / 100, 60)) - calculations.append(calculation) - - # Get solutions for plotting for different calculations - def get_solutions(calcs: list, variable: str) -> xr.Dataset: - dataarrays = [] - for calc in calcs: - if calc.name == 'Segmented': - dataarrays.append(calc.results.solution_without_overlap(variable).rename(calc.name)) - else: - dataarrays.append(calc.results.model.variables[variable].solution.rename(calc.name)) - return xr.merge(dataarrays) + 
clustering_parameters.time_series_for_high_peaks = [TS_heat_demand] + clustering_parameters.time_series_for_low_peaks = [TS_electricity_demand, TS_heat_demand] + optimization = fx.ClusteredOptimization('Aggregated', flow_system.copy(), clustering_parameters) + optimization.do_modeling() + optimization.solve(fx.solvers.HighsSolver(0.01 / 100, 60)) + optimizations.append(optimization) # --- Plotting for comparison --- fx.plotting.with_plotly( - get_solutions(calculations, 'Speicher|charge_state'), + get_solutions(optimizations, 'Speicher|charge_state'), mode='line', title='Charge State Comparison', ylabel='Charge state', @@ -209,7 +211,7 @@ def get_solutions(calcs: list, variable: str) -> xr.Dataset: ).write_html('results/Charge State.html') fx.plotting.with_plotly( - get_solutions(calculations, 'BHKW2(Q_th)|flow_rate'), + get_solutions(optimizations, 'BHKW2(Q_th)|flow_rate'), mode='line', title='BHKW2(Q_th) Flow Rate Comparison', ylabel='Flow rate', @@ -217,7 +219,7 @@ def get_solutions(calcs: list, variable: str) -> xr.Dataset: ).write_html('results/BHKW2 Thermal Power.html') fx.plotting.with_plotly( - get_solutions(calculations, 'costs(temporal)|per_timestep'), + get_solutions(optimizations, 'costs(temporal)|per_timestep'), mode='line', title='Operation Cost Comparison', ylabel='Costs [€]', @@ -225,15 +227,17 @@ def get_solutions(calcs: list, variable: str) -> xr.Dataset: ).write_html('results/Operation Costs.html') fx.plotting.with_plotly( - get_solutions(calculations, 'costs(temporal)|per_timestep').sum('time'), + get_solutions(optimizations, 'costs(temporal)|per_timestep').sum('time'), mode='stacked_bar', title='Total Cost Comparison', ylabel='Costs [€]', ).update_layout(barmode='group').write_html('results/Total Costs.html') fx.plotting.with_plotly( - pd.DataFrame([calc.durations for calc in calculations], index=[calc.name for calc in calculations]).to_xarray(), + pd.DataFrame( + [calc.durations for calc in optimizations], index=[calc.name for calc in 
optimizations] + ).to_xarray(), mode='stacked_bar', - ).update_layout(title='Duration Comparison', xaxis_title='Calculation type', yaxis_title='Time (s)').write_html( + ).update_layout(title='Duration Comparison', xaxis_title='Optimization type', yaxis_title='Time (s)').write_html( 'results/Speed Comparison.html' ) diff --git a/examples/04_Scenarios/scenario_example.py b/examples/04_Scenarios/scenario_example.py index 6bb920188..6ae01c4f0 100644 --- a/examples/04_Scenarios/scenario_example.py +++ b/examples/04_Scenarios/scenario_example.py @@ -196,13 +196,13 @@ # --- Define and Run Calculation --- # Create a calculation object to model the Flow System - calculation = fx.FullCalculation(name='Sim1', flow_system=flow_system) - calculation.do_modeling() # Translate the model to a solvable form, creating equations and Variables + optimization = fx.Optimization(name='Sim1', flow_system=flow_system) + optimization.do_modeling() # Translate the model to a solvable form, creating equations and Variables # --- Solve the Calculation and Save Results --- - calculation.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30)) + optimization.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30)) - calculation.results.setup_colors( + optimization.results.setup_colors( { 'CHP': 'red', 'Greys': ['Gastarif', 'Einspeisung', 'Heat Demand'], @@ -211,16 +211,16 @@ } ) - calculation.results.plot_heatmap('CHP(Q_th)|flow_rate') + optimization.results.plot_heatmap('CHP(Q_th)|flow_rate') # --- Analyze Results --- - calculation.results['Fernwärme'].plot_node_balance(mode='stacked_bar') - calculation.results.plot_heatmap('CHP(Q_th)|flow_rate') - calculation.results['Storage'].plot_charge_state() - calculation.results['Fernwärme'].plot_node_balance_pie(select={'period': 2020, 'scenario': 'Base Case'}) + optimization.results['Fernwärme'].plot_node_balance(mode='stacked_bar') + optimization.results.plot_heatmap('CHP(Q_th)|flow_rate') + 
optimization.results['Storage'].plot_charge_state() + optimization.results['Fernwärme'].plot_node_balance_pie(select={'period': 2020, 'scenario': 'Base Case'}) # Convert the results for the storage component to a dataframe and display - df = calculation.results['Storage'].node_balance_with_charge_state() + df = optimization.results['Storage'].node_balance_with_charge_state() # Save results to file for later usage - calculation.results.to_file() + optimization.results.to_file() diff --git a/examples/05_Two-stage-optimization/two_stage_optimization.py b/examples/05_Two-stage-optimization/two_stage_optimization.py index b61af3b2a..d8f4e87fe 100644 --- a/examples/05_Two-stage-optimization/two_stage_optimization.py +++ b/examples/05_Two-stage-optimization/two_stage_optimization.py @@ -125,13 +125,13 @@ # Separate optimization of flow sizes and dispatch start = timeit.default_timer() - calculation_sizing = fx.FullCalculation('Sizing', flow_system.resample('2h')) + calculation_sizing = fx.Optimization('Sizing', flow_system.resample('2h')) calculation_sizing.do_modeling() calculation_sizing.solve(fx.solvers.HighsSolver(0.1 / 100, 60)) timer_sizing = timeit.default_timer() - start start = timeit.default_timer() - calculation_dispatch = fx.FullCalculation('Dispatch', flow_system) + calculation_dispatch = fx.Optimization('Dispatch', flow_system) calculation_dispatch.do_modeling() calculation_dispatch.fix_sizes(calculation_sizing.results.solution) calculation_dispatch.solve(fx.solvers.HighsSolver(0.1 / 100, 60)) @@ -144,7 +144,7 @@ # Optimization of both flow sizes and dispatch together start = timeit.default_timer() - calculation_combined = fx.FullCalculation('Combined', flow_system) + calculation_combined = fx.Optimization('Combined', flow_system) calculation_combined.do_modeling() calculation_combined.solve(fx.solvers.HighsSolver(0.1 / 100, 600)) timer_combined = timeit.default_timer() - start diff --git a/flixopt/__init__.py b/flixopt/__init__.py index a55a57b3f..3941cb491 
100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -14,8 +14,10 @@ # Import commonly used classes and functions from . import linear_converters, plotting, results, solvers -from .aggregation import AggregationParameters + +# Import old Calculation classes for backwards compatibility (deprecated) from .calculation import AggregatedCalculation, FullCalculation, SegmentedCalculation +from .clustering import AggregationParameters, ClusteringParameters # AggregationParameters is deprecated from .components import ( LinearConverter, Sink, @@ -26,11 +28,14 @@ ) from .config import CONFIG, change_logging_level from .core import TimeSeriesData -from .effects import Effect +from .effects import PENALTY_EFFECT_LABEL, Effect from .elements import Bus, Flow from .flow_system import FlowSystem from .interface import InvestParameters, OnOffParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects +# Import new Optimization classes +from .optimization import ClusteredOptimization, Optimization, SegmentedOptimization + __all__ = [ 'TimeSeriesData', 'CONFIG', @@ -38,6 +43,7 @@ 'Flow', 'Bus', 'Effect', + 'PENALTY_EFFECT_LABEL', 'Source', 'Sink', 'SourceAndSink', @@ -45,16 +51,22 @@ 'LinearConverter', 'Transmission', 'FlowSystem', + # New Optimization classes (preferred) + 'Optimization', + 'ClusteredOptimization', + 'SegmentedOptimization', + # Old Calculation classes (deprecated, for backwards compatibility) 'FullCalculation', - 'SegmentedCalculation', 'AggregatedCalculation', + 'SegmentedCalculation', 'InvestParameters', 'OnOffParameters', 'Piece', 'Piecewise', 'PiecewiseConversion', 'PiecewiseEffects', - 'AggregationParameters', + 'ClusteringParameters', + 'AggregationParameters', # Deprecated, use ClusteringParameters 'plotting', 'results', 'linear_converters', diff --git a/flixopt/calculation.py b/flixopt/calculation.py index ee6742c22..1211c6763 100644 --- a/flixopt/calculation.py +++ b/flixopt/calculation.py @@ -1,49 +1,59 @@ """ -This module contains the 
Calculation functionality for the flixopt framework. -It is used to calculate a FlowSystemModel for a given FlowSystem through a solver. -There are three different Calculation types: - 1. FullCalculation: Calculates the FlowSystemModel for the full FlowSystem - 2. AggregatedCalculation: Calculates the FlowSystemModel for the full FlowSystem, but aggregates the TimeSeriesData. - This simplifies the mathematical model and usually speeds up the solving process. - 3. SegmentedCalculation: Solves a FlowSystemModel for each individual Segment of the FlowSystem. +This module provides backwards-compatible aliases for the renamed Optimization classes. + +DEPRECATED: This module is deprecated. Use the optimization module instead. +The following classes have been renamed: + - Calculation -> Optimization + - FullCalculation -> Optimization (now the standard, no "Full" prefix) + - AggregatedCalculation -> ClusteredOptimization + - SegmentedCalculation -> SegmentedOptimization + +Import from flixopt.optimization or use the new names from flixopt directly. """ from __future__ import annotations import logging -import math -import pathlib -import sys -import timeit import warnings -from collections import Counter -from typing import TYPE_CHECKING, Annotated, Any - -import numpy as np -from tqdm import tqdm - -from . 
import io as fx_io -from .aggregation import Aggregation, AggregationModel, AggregationParameters -from .components import Storage -from .config import CONFIG -from .core import DataConverter, TimeSeriesData, drop_constant_arrays -from .features import InvestmentModel -from .flow_system import FlowSystem -from .results import CalculationResults, SegmentedCalculationResults +from typing import TYPE_CHECKING + +from .config import DEPRECATION_REMOVAL_VERSION +from .optimization import ( + ClusteredOptimization as _ClusteredOptimization, +) +from .optimization import ( + Optimization as _Optimization, +) +from .optimization import ( + SegmentedOptimization as _SegmentedOptimization, +) if TYPE_CHECKING: + import pathlib + from typing import Annotated + import pandas as pd - import xarray as xr + from .clustering import AggregationParameters from .elements import Component - from .solvers import _Solver - from .structure import FlowSystemModel + from .flow_system import FlowSystem logger = logging.getLogger('flixopt') -class Calculation: +def _deprecation_warning(old_name: str, new_name: str): + """Issue a deprecation warning for renamed classes.""" + warnings.warn( + f'{old_name} is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. Use {new_name} instead.', + DeprecationWarning, + stacklevel=3, + ) + + +class Calculation(_Optimization): """ + DEPRECATED: Use Optimization instead. + class for defined way of solving a flow_system optimization Args: @@ -54,8 +64,6 @@ class for defined way of solving a flow_system optimization active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. 
""" - model: FlowSystemModel | None - def __init__( self, name: str, @@ -67,114 +75,14 @@ def __init__( folder: pathlib.Path | None = None, normalize_weights: bool = True, ): - self.name = name - if flow_system.used_in_calculation: - logger.warning( - f'This FlowSystem is already used in a calculation:\n{flow_system}\n' - f'Creating a copy of the FlowSystem for Calculation "{self.name}".' - ) - flow_system = flow_system.copy() - - if active_timesteps is not None: - warnings.warn( - "The 'active_timesteps' parameter is deprecated and will be removed in a future version. " - 'Use flow_system.sel(time=timesteps) or flow_system.isel(time=indices) before passing ' - 'the FlowSystem to the Calculation instead.', - DeprecationWarning, - stacklevel=2, - ) - flow_system = flow_system.sel(time=active_timesteps) - self._active_timesteps = active_timesteps # deprecated - self.normalize_weights = normalize_weights - - flow_system._used_in_calculation = True - - self.flow_system = flow_system - self.model = None - - self.durations = {'modeling': 0.0, 'solving': 0.0, 'saving': 0.0} - self.folder = pathlib.Path.cwd() / 'results' if folder is None else pathlib.Path(folder) - self.results: CalculationResults | None = None - - if self.folder.exists() and not self.folder.is_dir(): - raise NotADirectoryError(f'Path {self.folder} exists and is not a directory.') - self.folder.mkdir(parents=False, exist_ok=True) - - @property - def main_results(self) -> dict[str, int | float | dict]: - from flixopt.features import InvestmentModel - - main_results = { - 'Objective': self.model.objective.value, - 'Penalty': self.model.effects.penalty.total.solution.values, - 'Effects': { - f'{effect.label} [{effect.unit}]': { - 'temporal': effect.submodel.temporal.total.solution.values, - 'periodic': effect.submodel.periodic.total.solution.values, - 'total': effect.submodel.total.solution.values, - } - for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.label_full.upper()) - }, - 
'Invest-Decisions': { - 'Invested': { - model.label_of_element: model.size.solution - for component in self.flow_system.components.values() - for model in component.submodel.all_submodels - if isinstance(model, InvestmentModel) and model.size.solution.max() >= CONFIG.Modeling.epsilon - }, - 'Not invested': { - model.label_of_element: model.size.solution - for component in self.flow_system.components.values() - for model in component.submodel.all_submodels - if isinstance(model, InvestmentModel) and model.size.solution.max() < CONFIG.Modeling.epsilon - }, - }, - 'Buses with excess': [ - { - bus.label_full: { - 'input': bus.submodel.excess_input.solution.sum('time'), - 'output': bus.submodel.excess_output.solution.sum('time'), - } - } - for bus in self.flow_system.buses.values() - if bus.with_excess - and ( - bus.submodel.excess_input.solution.sum() > 1e-3 or bus.submodel.excess_output.solution.sum() > 1e-3 - ) - ], - } - - return fx_io.round_nested_floats(main_results) - - @property - def summary(self): - return { - 'Name': self.name, - 'Number of timesteps': len(self.flow_system.timesteps), - 'Calculation Type': self.__class__.__name__, - 'Constraints': self.model.constraints.ncons, - 'Variables': self.model.variables.nvars, - 'Main Results': self.main_results, - 'Durations': self.durations, - 'Config': CONFIG.to_dict(), - } - - @property - def active_timesteps(self) -> pd.DatetimeIndex: - warnings.warn( - 'active_timesteps is deprecated. Use flow_system.sel(time=...) or flow_system.isel(time=...) 
instead.', - DeprecationWarning, - stacklevel=2, - ) - return self._active_timesteps - - @property - def modeled(self) -> bool: - return True if self.model is not None else False - - -class FullCalculation(Calculation): + _deprecation_warning('Calculation', 'Optimization') + super().__init__(name, flow_system, active_timesteps, folder, normalize_weights) + + +class FullCalculation(_Optimization): """ + DEPRECATED: Use Optimization instead (the "Full" prefix has been removed). + FullCalculation solves the complete optimization problem using all time steps. This is the most comprehensive calculation type that considers every time step @@ -188,109 +96,38 @@ class FullCalculation(Calculation): active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. """ - def do_modeling(self) -> FullCalculation: - t_start = timeit.default_timer() - self.flow_system.connect_and_transform() - - self.model = self.flow_system.create_model(self.normalize_weights) - self.model.do_modeling() - - self.durations['modeling'] = round(timeit.default_timer() - t_start, 2) - return self - - def fix_sizes(self, ds: xr.Dataset, decimal_rounding: int | None = 5) -> FullCalculation: - """Fix the sizes of the calculations to specified values. - - Args: - ds: The dataset that contains the variable names mapped to their sizes. If None, the dataset is loaded from the results. - decimal_rounding: The number of decimal places to round the sizes to. If no rounding is applied, numerical errors might lead to infeasibility. - """ - if not self.modeled: - raise RuntimeError('Model was not created. Call do_modeling() first.') - if decimal_rounding is not None: - ds = ds.round(decimal_rounding) - - for name, da in ds.data_vars.items(): - if '|size' not in name: - continue - if name not in self.model.variables: - logger.debug(f'Variable {name} not found in calculation model. 
Skipping.') - continue - - con = self.model.add_constraints( - self.model[name] == da, - name=f'{name}-fixed', - ) - logger.debug(f'Fixed "{name}":\n{con}') - - return self - - def solve( - self, solver: _Solver, log_file: pathlib.Path | None = None, log_main_results: bool | None = None - ) -> FullCalculation: - # Auto-call do_modeling() if not already done - if not self.modeled: - logger.info('Model not yet created. Calling do_modeling() automatically.') - self.do_modeling() - - t_start = timeit.default_timer() - - self.model.solve( - log_fn=pathlib.Path(log_file) if log_file is not None else self.folder / f'{self.name}.log', - solver_name=solver.name, - **solver.options, - ) - self.durations['solving'] = round(timeit.default_timer() - t_start, 2) - logger.success(f'Model solved with {solver.name} in {self.durations["solving"]:.2f} seconds.') - logger.info(f'Model status after solve: {self.model.status}') - - if self.model.status == 'warning': - # Save the model and the flow_system to file in case of infeasibility - paths = fx_io.CalculationResultsPaths(self.folder, self.name) - from .io import document_linopy_model - - document_linopy_model(self.model, paths.model_documentation) - self.flow_system.to_netcdf(paths.flow_system) - raise RuntimeError( - f'Model was infeasible. Please check {paths.model_documentation=} and {paths.flow_system=} for more information.' - ) - - # Log the formatted output - should_log = log_main_results if log_main_results is not None else CONFIG.Solving.log_main_results - if should_log and logger.isEnabledFor(logging.INFO): - logger.info( - f'{" Main Results ":#^80}\n' + fx_io.format_yaml_string(self.main_results, compact_numeric_lists=True) - ) - - self.results = CalculationResults.from_calculation(self) - - return self - - -class AggregatedCalculation(FullCalculation): + def __init__( + self, + name: str, + flow_system: FlowSystem, + active_timesteps: Annotated[ + pd.DatetimeIndex | None, + 'DEPRECATED: Use flow_system.sel(time=...) 
or flow_system.isel(time=...) instead', + ] = None, + folder: pathlib.Path | None = None, + normalize_weights: bool = True, + ): + _deprecation_warning('FullCalculation', 'Optimization') + super().__init__(name, flow_system, active_timesteps, folder, normalize_weights) + + +class AggregatedCalculation(_ClusteredOptimization): """ + DEPRECATED: Use ClusteredOptimization instead. + AggregatedCalculation reduces computational complexity by clustering time series into typical periods. This calculation approach aggregates time series data using clustering techniques (tsam) to identify representative time periods, significantly reducing computation time while maintaining solution accuracy. - Note: - The quality of the solution depends on the choice of aggregation parameters. - The optimal parameters depend on the specific problem and the characteristics of the time series data. - For more information, refer to the [tsam documentation](https://tsam.readthedocs.io/en/latest/). - Args: name: Name of the calculation flow_system: FlowSystem to be optimized aggregation_parameters: Parameters for aggregation. See AggregationParameters class documentation components_to_clusterize: list of Components to perform aggregation on. If None, all components are aggregated. This equalizes variables in the components according to the typical periods computed in the aggregation - active_timesteps: DatetimeIndex of timesteps to use for calculation. If None, all timesteps are used + active_timesteps: DatetimeIndex of timesteps to use for optimization. If None, all timesteps are used folder: Folder where results should be saved. 
If None, current working directory is used - - Attributes: - aggregation (Aggregation | None): Contains the clustered time series data - aggregation_model (AggregationModel | None): Contains Variables and Constraints that equalize clusters of the time series data """ def __init__( @@ -305,218 +142,23 @@ def __init__( ] = None, folder: pathlib.Path | None = None, ): - if flow_system.scenarios is not None: - raise ValueError('Aggregation is not supported for scenarios yet. Please use FullCalculation instead.') - super().__init__(name, flow_system, active_timesteps, folder=folder) - self.aggregation_parameters = aggregation_parameters - self.components_to_clusterize = components_to_clusterize - self.aggregation: Aggregation | None = None - self.aggregation_model: AggregationModel | None = None - - def do_modeling(self) -> AggregatedCalculation: - t_start = timeit.default_timer() - self.flow_system.connect_and_transform() - self._perform_aggregation() - - # Model the System - self.model = self.flow_system.create_model(self.normalize_weights) - self.model.do_modeling() - # Add Aggregation Submodel after modeling the rest - self.aggregation_model = AggregationModel( - self.model, self.aggregation_parameters, self.flow_system, self.aggregation, self.components_to_clusterize - ) - self.aggregation_model.do_modeling() - self.durations['modeling'] = round(timeit.default_timer() - t_start, 2) - return self - - def _perform_aggregation(self): - from .aggregation import Aggregation - - t_start_agg = timeit.default_timer() - - # Validation - dt_min = float(self.flow_system.hours_per_timestep.min().item()) - dt_max = float(self.flow_system.hours_per_timestep.max().item()) - if not dt_min == dt_max: - raise ValueError( - f'Aggregation failed due to inconsistent time step sizes:' - f'delta_t varies from {dt_min} to {dt_max} hours.' 
- ) - ratio = self.aggregation_parameters.hours_per_period / dt_max - if not np.isclose(ratio, round(ratio), atol=1e-9): - raise ValueError( - f'The selected {self.aggregation_parameters.hours_per_period=} does not match the time ' - f'step size of {dt_max} hours. It must be an integer multiple of {dt_max} hours.' - ) - - logger.info(f'{"":#^80}') - logger.info(f'{" Aggregating TimeSeries Data ":#^80}') - - ds = self.flow_system.to_dataset() - - temporaly_changing_ds = drop_constant_arrays(ds, dim='time') - - # Aggregation - creation of aggregated timeseries: - self.aggregation = Aggregation( - original_data=temporaly_changing_ds.to_dataframe(), - hours_per_time_step=float(dt_min), - hours_per_period=self.aggregation_parameters.hours_per_period, - nr_of_periods=self.aggregation_parameters.nr_of_periods, - weights=self.calculate_aggregation_weights(temporaly_changing_ds), - time_series_for_high_peaks=self.aggregation_parameters.labels_for_high_peaks, - time_series_for_low_peaks=self.aggregation_parameters.labels_for_low_peaks, - ) - - self.aggregation.cluster() - self.aggregation.plot(show=CONFIG.Plotting.default_show, save=self.folder / 'aggregation.html') - if self.aggregation_parameters.aggregate_data_and_fix_non_binary_vars: - ds = self.flow_system.to_dataset() - for name, series in self.aggregation.aggregated_data.items(): - da = ( - DataConverter.to_dataarray(series, self.flow_system.coords) - .rename(name) - .assign_attrs(ds[name].attrs) - ) - if TimeSeriesData.is_timeseries_data(da): - da = TimeSeriesData.from_dataarray(da) - - ds[name] = da - - self.flow_system = FlowSystem.from_dataset(ds) - self.flow_system.connect_and_transform() - self.durations['aggregation'] = round(timeit.default_timer() - t_start_agg, 2) - - @classmethod - def calculate_aggregation_weights(cls, ds: xr.Dataset) -> dict[str, float]: - """Calculate weights for all datavars in the dataset. 
Weights are pulled from the attrs of the datavars.""" - - groups = [da.attrs['aggregation_group'] for da in ds.data_vars.values() if 'aggregation_group' in da.attrs] - group_counts = Counter(groups) - - # Calculate weight for each group (1/count) - group_weights = {group: 1 / count for group, count in group_counts.items()} - - weights = {} - for name, da in ds.data_vars.items(): - group_weight = group_weights.get(da.attrs.get('aggregation_group')) - if group_weight is not None: - weights[name] = group_weight - else: - weights[name] = da.attrs.get('aggregation_weight', 1) - - if np.all(np.isclose(list(weights.values()), 1, atol=1e-6)): - logger.info('All Aggregation weights were set to 1') - - return weights - - -class SegmentedCalculation(Calculation): - """Solve large optimization problems by dividing time horizon into (overlapping) segments. - - This class addresses memory and computational limitations of large-scale optimization - problems by decomposing the time horizon into smaller overlapping segments that are - solved sequentially. Each segment uses final values from the previous segment as - initial conditions, ensuring dynamic continuity across the solution. - - Key Concepts: - **Temporal Decomposition**: Divides long time horizons into manageable segments - **Overlapping Windows**: Segments share timesteps to improve storage dynamics - **Value Transfer**: Final states of one segment become initial states of the next - **Sequential Solving**: Each segment solved independently but with coupling - - Limitations and Constraints: - **Investment Parameters**: InvestParameters are not supported in segmented calculations - as investment decisions must be made for the entire time horizon, not per segment. - - **Global Constraints**: Time-horizon-wide constraints (flow_hours_total_min/max, - load_factor_min/max) may produce suboptimal results as they cannot be enforced - globally across segments. 
- - **Storage Dynamics**: While overlap helps, storage optimization may be suboptimal - compared to full-horizon solutions due to limited foresight in each segment. + _deprecation_warning('AggregatedCalculation', 'ClusteredOptimization') + super().__init__(name, flow_system, aggregation_parameters, components_to_clusterize, active_timesteps, folder) + + +class SegmentedCalculation(_SegmentedOptimization): + """ + DEPRECATED: Use SegmentedOptimization instead. + + Solve large optimization problems by dividing time horizon into (overlapping) segments. Args: name: Unique identifier for the calculation, used in result files and logging. flow_system: The FlowSystem to optimize, containing all components, flows, and buses. timesteps_per_segment: Number of timesteps in each segment (excluding overlap). - Must be > 2 to avoid internal side effects. Larger values provide better - optimization at the cost of memory and computation time. overlap_timesteps: Number of additional timesteps added to each segment. - Improves storage optimization by providing lookahead. Higher values - improve solution quality but increase computational cost. - nr_of_previous_values: Number of previous timestep values to transfer between - segments for initialization. Typically 1 is sufficient. + nr_of_previous_values: Number of previous timestep values to transfer between segments for initialization. folder: Directory for saving results. Defaults to current working directory + 'results'. 
- - Examples: - Annual optimization with monthly segments: - - ```python - # 8760 hours annual data with monthly segments (730 hours) and 48-hour overlap - segmented_calc = SegmentedCalculation( - name='annual_energy_system', - flow_system=energy_system, - timesteps_per_segment=730, # ~1 month - overlap_timesteps=48, # 2 days overlap - folder=Path('results/segmented'), - ) - segmented_calc.do_modeling_and_solve(solver='gurobi') - ``` - - Weekly optimization with daily overlap: - - ```python - # Weekly segments for detailed operational planning - weekly_calc = SegmentedCalculation( - name='weekly_operations', - flow_system=industrial_system, - timesteps_per_segment=168, # 1 week (hourly data) - overlap_timesteps=24, # 1 day overlap - nr_of_previous_values=1, - ) - ``` - - Large-scale system with minimal overlap: - - ```python - # Large system with minimal overlap for computational efficiency - large_calc = SegmentedCalculation( - name='large_scale_grid', - flow_system=grid_system, - timesteps_per_segment=100, # Shorter segments - overlap_timesteps=5, # Minimal overlap - ) - ``` - - Design Considerations: - **Segment Size**: Balance between solution quality and computational efficiency. - Larger segments provide better optimization but require more memory and time. - - **Overlap Duration**: More overlap improves storage dynamics and reduces - end-effects but increases computational cost. Typically 5-10% of segment length. - - **Storage Systems**: Systems with large storage components benefit from longer - overlaps to capture charge/discharge cycles effectively. - - **Investment Decisions**: Use FullCalculation for problems requiring investment - optimization, as SegmentedCalculation cannot handle investment parameters. 
- - Common Use Cases: - - **Annual Planning**: Long-term planning with seasonal variations - - **Large Networks**: Spatially or temporally large energy systems - - **Memory-Limited Systems**: When full optimization exceeds available memory - - **Operational Planning**: Detailed short-term optimization with limited foresight - - **Sensitivity Analysis**: Quick approximate solutions for parameter studies - - Performance Tips: - - Start with FullCalculation and use this class if memory issues occur - - Use longer overlaps for systems with significant storage - - Monitor solution quality at segment boundaries for discontinuities - - Warning: - The evaluation of the solution is a bit more complex than FullCalculation or AggregatedCalculation - due to the overlapping individual solutions. - """ def __init__( @@ -528,209 +170,8 @@ def __init__( nr_of_previous_values: int = 1, folder: pathlib.Path | None = None, ): - super().__init__(name, flow_system, folder=folder) - self.timesteps_per_segment = timesteps_per_segment - self.overlap_timesteps = overlap_timesteps - self.nr_of_previous_values = nr_of_previous_values - self.sub_calculations: list[FullCalculation] = [] - - self.segment_names = [ - f'Segment_{i + 1}' for i in range(math.ceil(len(self.all_timesteps) / self.timesteps_per_segment)) - ] - self._timesteps_per_segment = self._calculate_timesteps_per_segment() - - assert timesteps_per_segment > 2, 'The Segment length must be greater 2, due to unwanted internal side effects' - assert self.timesteps_per_segment_with_overlap <= len(self.all_timesteps), ( - f'{self.timesteps_per_segment_with_overlap=} cant be greater than the total length {len(self.all_timesteps)}' - ) - - self.flow_system._connect_network() # Connect network to ensure that all Flows know their Component - # Storing all original start values - self._original_start_values = { - **{flow.label_full: flow.previous_flow_rate for flow in self.flow_system.flows.values()}, - **{ - comp.label_full: 
comp.initial_charge_state - for comp in self.flow_system.components.values() - if isinstance(comp, Storage) - }, - } - self._transfered_start_values: list[dict[str, Any]] = [] - - def _create_sub_calculations(self): - for i, (segment_name, timesteps_of_segment) in enumerate( - zip(self.segment_names, self._timesteps_per_segment, strict=True) - ): - calc = FullCalculation(f'{self.name}-{segment_name}', self.flow_system.sel(time=timesteps_of_segment)) - calc.flow_system._connect_network() # Connect to have Correct names of Flows! - - self.sub_calculations.append(calc) - logger.info( - f'{segment_name} [{i + 1:>2}/{len(self.segment_names):<2}] ' - f'({timesteps_of_segment[0]} -> {timesteps_of_segment[-1]}):' - ) - - def _solve_single_segment( - self, - i: int, - calculation: FullCalculation, - solver: _Solver, - log_file: pathlib.Path | None, - log_main_results: bool, - suppress_output: bool, - ) -> None: - """Solve a single segment calculation.""" - if i > 0 and self.nr_of_previous_values > 0: - self._transfer_start_values(i) - - calculation.do_modeling() - - # Warn about Investments, but only in first run - if i == 0: - invest_elements = [ - model.label_full - for component in calculation.flow_system.components.values() - for model in component.submodel.all_submodels - if isinstance(model, InvestmentModel) - ] - if invest_elements: - logger.critical( - f'Investments are not supported in Segmented Calculation! 
' - f'Following InvestmentModels were found: {invest_elements}' - ) - - log_path = pathlib.Path(log_file) if log_file is not None else self.folder / f'{self.name}.log' - - if suppress_output: - with fx_io.suppress_output(): - calculation.solve(solver, log_file=log_path, log_main_results=log_main_results) - else: - calculation.solve(solver, log_file=log_path, log_main_results=log_main_results) - - def do_modeling_and_solve( - self, - solver: _Solver, - log_file: pathlib.Path | None = None, - log_main_results: bool = False, - show_individual_solves: bool = False, - ) -> SegmentedCalculation: - """Model and solve all segments of the segmented calculation. - - This method creates sub-calculations for each time segment, then iteratively - models and solves each segment. It supports two output modes: a progress bar - for compact output, or detailed individual solve information. - - Args: - solver: The solver instance to use for optimization (e.g., Gurobi, HiGHS). - log_file: Optional path to the solver log file. If None, defaults to - folder/name.log. - log_main_results: Whether to log main results (objective, effects, etc.) - after each segment solve. Defaults to False. - show_individual_solves: If True, shows detailed output for each segment - solve with logger messages. If False (default), shows a compact progress - bar with suppressed solver output for cleaner display. - - Returns: - Self, for method chaining. - - Note: - The method automatically transfers all start values between segments to ensure - continuity of storage states and flow rates across segment boundaries. 
- """ - logger.info(f'{"":#^80}') - logger.info(f'{" Segmented Solving ":#^80}') - self._create_sub_calculations() - - if show_individual_solves: - # Path 1: Show individual solves with detailed output - for i, calculation in enumerate(self.sub_calculations): - logger.info( - f'Solving segment {i + 1}/{len(self.sub_calculations)}: ' - f'{calculation.flow_system.timesteps[0]} -> {calculation.flow_system.timesteps[-1]}' - ) - self._solve_single_segment(i, calculation, solver, log_file, log_main_results, suppress_output=False) - else: - # Path 2: Show only progress bar with suppressed output - progress_bar = tqdm( - enumerate(self.sub_calculations), - total=len(self.sub_calculations), - desc='Solving segments', - unit='segment', - file=sys.stdout, - disable=not CONFIG.Solving.log_to_console, - ) - - try: - for i, calculation in progress_bar: - progress_bar.set_description( - f'Solving ({calculation.flow_system.timesteps[0]} -> {calculation.flow_system.timesteps[-1]})' - ) - self._solve_single_segment(i, calculation, solver, log_file, log_main_results, suppress_output=True) - finally: - progress_bar.close() - - for calc in self.sub_calculations: - for key, value in calc.durations.items(): - self.durations[key] += value - - logger.success(f'Model solved with {solver.name} in {self.durations["solving"]:.2f} seconds.') - - self.results = SegmentedCalculationResults.from_calculation(self) - - return self - - def _transfer_start_values(self, i: int): - """ - This function gets the last values of the previous solved segment and - inserts them as start values for the next segment - """ - timesteps_of_prior_segment = self.sub_calculations[i - 1].flow_system.timesteps_extra - - start = self.sub_calculations[i].flow_system.timesteps[0] - start_previous_values = timesteps_of_prior_segment[self.timesteps_per_segment - self.nr_of_previous_values] - end_previous_values = timesteps_of_prior_segment[self.timesteps_per_segment - 1] - - logger.debug( - f'Start of next segment: {start}. 
Indices of previous values: {start_previous_values} -> {end_previous_values}' - ) - current_flow_system = self.sub_calculations[i - 1].flow_system - next_flow_system = self.sub_calculations[i].flow_system - - start_values_of_this_segment = {} - - for current_flow in current_flow_system.flows.values(): - next_flow = next_flow_system.flows[current_flow.label_full] - next_flow.previous_flow_rate = current_flow.submodel.flow_rate.solution.sel( - time=slice(start_previous_values, end_previous_values) - ).values - start_values_of_this_segment[current_flow.label_full] = next_flow.previous_flow_rate - - for current_comp in current_flow_system.components.values(): - next_comp = next_flow_system.components[current_comp.label_full] - if isinstance(next_comp, Storage): - next_comp.initial_charge_state = current_comp.submodel.charge_state.solution.sel(time=start).item() - start_values_of_this_segment[current_comp.label_full] = next_comp.initial_charge_state - - self._transfered_start_values.append(start_values_of_this_segment) - - def _calculate_timesteps_per_segment(self) -> list[pd.DatetimeIndex]: - timesteps_per_segment = [] - for i, _ in enumerate(self.segment_names): - start = self.timesteps_per_segment * i - end = min(start + self.timesteps_per_segment_with_overlap, len(self.all_timesteps)) - timesteps_per_segment.append(self.all_timesteps[start:end]) - return timesteps_per_segment - - @property - def timesteps_per_segment_with_overlap(self): - return self.timesteps_per_segment + self.overlap_timesteps - - @property - def start_values_of_segments(self) -> list[dict[str, Any]]: - """Gives an overview of the start values of all Segments""" - return [{name: value for name, value in self._original_start_values.items()}] + [ - start_values for start_values in self._transfered_start_values - ] - - @property - def all_timesteps(self) -> pd.DatetimeIndex: - return self.flow_system.timesteps + _deprecation_warning('SegmentedCalculation', 'SegmentedOptimization') + 
super().__init__(name, flow_system, timesteps_per_segment, overlap_timesteps, nr_of_previous_values, folder) + + +__all__ = ['Calculation', 'FullCalculation', 'AggregatedCalculation', 'SegmentedCalculation'] diff --git a/flixopt/aggregation.py b/flixopt/clustering.py similarity index 83% rename from flixopt/aggregation.py rename to flixopt/clustering.py index adaed3e42..2fbd65318 100644 --- a/flixopt/aggregation.py +++ b/flixopt/clustering.py @@ -1,6 +1,6 @@ """ -This module contains the Aggregation functionality for the flixopt framework. -Through this, aggregating TimeSeriesData is possible. +This module contains the Clustering functionality for the flixopt framework. +Through this, clustering TimeSeriesData is possible. """ from __future__ import annotations @@ -9,10 +9,13 @@ import logging import pathlib import timeit +import warnings as _warnings from typing import TYPE_CHECKING import numpy as np +from .config import DEPRECATION_REMOVAL_VERSION + try: import tsam.timeseriesaggregation as tsam @@ -40,9 +43,9 @@ logger = logging.getLogger('flixopt') -class Aggregation: +class Clustering: """ - aggregation organizing class + Clustering organizing class """ def __init__( @@ -239,7 +242,7 @@ def get_equation_indices(self, skip_first_index_of_period: bool = True) -> tuple return np.array(idx_var1), np.array(idx_var2) -class AggregationParameters: +class ClusteringParameters: def __init__( self, hours_per_period: float, @@ -252,7 +255,7 @@ def __init__( time_series_for_low_peaks: list[TimeSeriesData] | None = None, ): """ - Initializes aggregation parameters for time series data + Initializes clustering parameters for time series data Args: hours_per_period: Duration of each period in hours. @@ -295,26 +298,26 @@ def use_low_peaks(self) -> bool: return bool(self.time_series_for_low_peaks) -class AggregationModel(Submodel): - """The AggregationModel holds equations and variables related to the Aggregation of a FlowSystem. 
+class ClusteringModel(Submodel): + """The ClusteringModel holds equations and variables related to the Clustering of a FlowSystem. It creates Equations that equates indices of variables, and introduces penalties related to binary variables, that escape the equation to their related binaries in other periods""" def __init__( self, model: FlowSystemModel, - aggregation_parameters: AggregationParameters, + clustering_parameters: ClusteringParameters, flow_system: FlowSystem, - aggregation_data: Aggregation, + clustering_data: Clustering, components_to_clusterize: list[Component] | None, ): """ Modeling-Element for "index-equating"-equations """ - super().__init__(model, label_of_element='Aggregation', label_of_model='Aggregation') + super().__init__(model, label_of_element='Clustering', label_of_model='Clustering') self.flow_system = flow_system - self.aggregation_parameters = aggregation_parameters - self.aggregation_data = aggregation_data + self.clustering_parameters = clustering_parameters + self.clustering_data = clustering_data self.components_to_clusterize = components_to_clusterize def do_modeling(self): @@ -323,7 +326,7 @@ def do_modeling(self): else: components = [component for component in self.components_to_clusterize] - indices = self.aggregation_data.get_equation_indices(skip_first_index_of_period=True) + indices = self.clustering_data.get_equation_indices(skip_first_index_of_period=True) time_variables: set[str] = { name for name in self._model.variables if 'time' in self._model.variables[name].dims @@ -332,22 +335,30 @@ def do_modeling(self): binary_time_variables: set[str] = time_variables & binary_variables for component in components: - if isinstance(component, Storage) and not self.aggregation_parameters.fix_storage_flows: + if isinstance(component, Storage) and not self.clustering_parameters.fix_storage_flows: continue # Fix Nothing in The Storage all_variables_of_component = set(component.submodel.variables) - if 
self.aggregation_parameters.aggregate_data_and_fix_non_binary_vars:
+            if self.clustering_parameters.aggregate_data_and_fix_non_binary_vars:
                 relevant_variables = component.submodel.variables[all_variables_of_component & time_variables]
             else:
                 relevant_variables = component.submodel.variables[all_variables_of_component & binary_time_variables]
             for variable in relevant_variables:
                 self._equate_indices(component.submodel.variables[variable], indices)
 
-        penalty = self.aggregation_parameters.penalty_of_period_freedom
-        if (self.aggregation_parameters.percentage_of_period_freedom > 0) and penalty != 0:
-            for variable in self.variables_direct.values():
-                self._model.effects.add_share_to_penalty('Aggregation', variable * penalty)
+        penalty = self.clustering_parameters.penalty_of_period_freedom
+        if (self.clustering_parameters.percentage_of_period_freedom > 0) and penalty != 0:
+            from .effects import PENALTY_EFFECT_LABEL
+
+            for variable_name in self.variables_direct:
+                variable = self.variables_direct[variable_name]
+                # Sum correction variables over the time dimension to get the periodic penalty contribution
+                self._model.effects.add_share_to_effects(
+                    name='Aggregation',
+                    expressions={PENALTY_EFFECT_LABEL: (variable * penalty).sum('time')},
+                    target='periodic',
+                )
 
     def _equate_indices(self, variable: linopy.Variable, indices: tuple[np.ndarray, np.ndarray]) -> None:
         assert len(indices[0]) == len(indices[1]), 'The length of the indices must match!!'
@@ -363,7 +374,7 @@ def _equate_indices(self, variable: linopy.Variable, indices: tuple[np.ndarray, # Korrektur: (bisher nur für Binärvariablen:) if ( variable.name in self._model.variables.binaries - and self.aggregation_parameters.percentage_of_period_freedom > 0 + and self.clustering_parameters.percentage_of_period_freedom > 0 ): sel = variable.isel(time=indices[0]) coords = {d: sel.indexes[d] for d in sel.dims} @@ -385,8 +396,44 @@ def _equate_indices(self, variable: linopy.Variable, indices: tuple[np.ndarray, # Begrenzung der Korrektur-Anzahl: # eq: sum(K) <= n_Corr_max - limit = int(np.floor(self.aggregation_parameters.percentage_of_period_freedom / 100 * length)) + limit = int(np.floor(self.clustering_parameters.percentage_of_period_freedom / 100 * length)) self.add_constraints( var_k0.sum(dim='time') + var_k1.sum(dim='time') <= limit, short_name=f'limit_corrections|{variable.name}', ) + + +# ===== Deprecated aliases for backward compatibility ===== + + +def _create_deprecation_warning(old_name: str, new_name: str): + """Helper to create a deprecation warning""" + _warnings.warn( + f"'{old_name}' is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
Use '{new_name}' instead.", + DeprecationWarning, + stacklevel=3, + ) + + +class Aggregation(Clustering): + """Deprecated: Use Clustering instead.""" + + def __init__(self, *args, **kwargs): + _create_deprecation_warning('Aggregation', 'Clustering') + super().__init__(*args, **kwargs) + + +class AggregationParameters(ClusteringParameters): + """Deprecated: Use ClusteringParameters instead.""" + + def __init__(self, *args, **kwargs): + _create_deprecation_warning('AggregationParameters', 'ClusteringParameters') + super().__init__(*args, **kwargs) + + +class AggregationModel(ClusteringModel): + """Deprecated: Use ClusteringModel instead.""" + + def __init__(self, *args, **kwargs): + _create_deprecation_warning('AggregationModel', 'ClusteringModel') + super().__init__(*args, **kwargs) diff --git a/flixopt/components.py b/flixopt/components.py index 0ad208024..07bc5f204 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -5,14 +5,12 @@ from __future__ import annotations import logging -import warnings from typing import TYPE_CHECKING, Literal import numpy as np import xarray as xr from . 
import io as fx_io -from .config import DEPRECATION_REMOVAL_VERSION from .core import PlausibilityError from .elements import Component, ComponentModel, Flow from .features import InvestmentModel, PiecewiseModel @@ -23,7 +21,6 @@ if TYPE_CHECKING: import linopy - from .flow_system import FlowSystem from .types import Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') diff --git a/flixopt/config.py b/flixopt/config.py index 824f80b75..dbe2bf3c5 100644 --- a/flixopt/config.py +++ b/flixopt/config.py @@ -6,7 +6,10 @@ from logging.handlers import RotatingFileHandler from pathlib import Path from types import MappingProxyType -from typing import Literal +from typing import TYPE_CHECKING, Literal + +if TYPE_CHECKING: + from typing import TextIO try: import colorlog @@ -17,7 +20,7 @@ COLORLOG_AVAILABLE = False escape_codes = None -__all__ = ['CONFIG', 'change_logging_level', 'MultilineFormatter'] +__all__ = ['CONFIG', 'change_logging_level', 'MultilineFormatter', 'SUCCESS_LEVEL'] if COLORLOG_AVAILABLE: __all__.append('ColoredMultilineFormatter') @@ -30,18 +33,12 @@ DEPRECATION_REMOVAL_VERSION = '5.0.0' -def _success(self, message, *args, **kwargs): - """Log a message with severity 'SUCCESS'.""" - if self.isEnabledFor(SUCCESS_LEVEL): - self._log(SUCCESS_LEVEL, message, args, **kwargs) - - -# Add success() method to Logger class -logging.Logger.success = _success - - class MultilineFormatter(logging.Formatter): - """Custom formatter that handles multi-line messages with box-style borders.""" + """Custom formatter that handles multi-line messages with box-style borders. + + Uses Unicode box-drawing characters for prettier output, with a fallback + to simple formatting if any encoding issues occur. 
+ """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -52,85 +49,99 @@ def __init__(self, *args, **kwargs): def format(self, record): """Format multi-line messages with box-style borders for better readability.""" - # Split into lines - lines = record.getMessage().split('\n') + try: + # Split into lines + lines = record.getMessage().split('\n') - # Add exception info if present (critical for logger.exception()) - if record.exc_info: - lines.extend(self.formatException(record.exc_info).split('\n')) - if record.stack_info: - lines.extend(record.stack_info.rstrip().split('\n')) + # Add exception info if present (critical for logger.exception()) + if record.exc_info: + lines.extend(self.formatException(record.exc_info).split('\n')) + if record.stack_info: + lines.extend(record.stack_info.rstrip().split('\n')) + + # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm) + # formatTime doesn't support %f, so use datetime directly + import datetime - # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm) - # formatTime doesn't support %f, so use datetime directly - import datetime + dt = datetime.datetime.fromtimestamp(record.created) + time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] - dt = datetime.datetime.fromtimestamp(record.created) - time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] + # Single line - return standard format + if len(lines) == 1: + level_str = f'{record.levelname: <8}' + return f'{time_str} {level_str} │ {lines[0]}' - # Single line - return standard format - if len(lines) == 1: + # Multi-line - use box format level_str = f'{record.levelname: <8}' - return f'{time_str} {level_str} │ {lines[0]}' + result = f'{time_str} {level_str} │ ┌─ {lines[0]}' + indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm) + for line in lines[1:-1]: + result += f'\n{indent} {" " * 8} │ │ {line}' + result += f'\n{indent} {" " * 8} │ └─ {lines[-1]}' - # Multi-line - use box format - level_str = 
f'{record.levelname: <8}' - result = f'{time_str} {level_str} │ ┌─ {lines[0]}' - indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm) - for line in lines[1:-1]: - result += f'\n{indent} {" " * 8} │ │ {line}' - result += f'\n{indent} {" " * 8} │ └─ {lines[-1]}' + return result - return result + except Exception as e: + # Fallback to simple formatting if anything goes wrong (e.g., encoding issues) + return f'{record.created} {record.levelname} - {record.getMessage()} [Formatting Error: {e}]' if COLORLOG_AVAILABLE: class ColoredMultilineFormatter(colorlog.ColoredFormatter): - """Colored formatter with multi-line message support.""" + """Colored formatter with multi-line message support. + + Uses Unicode box-drawing characters for prettier output, with a fallback + to simple formatting if any encoding issues occur. + """ def format(self, record): """Format multi-line messages with colors and box-style borders.""" - # Split into lines - lines = record.getMessage().split('\n') + try: + # Split into lines + lines = record.getMessage().split('\n') - # Add exception info if present (critical for logger.exception()) - if record.exc_info: - lines.extend(self.formatException(record.exc_info).split('\n')) - if record.stack_info: - lines.extend(record.stack_info.rstrip().split('\n')) + # Add exception info if present (critical for logger.exception()) + if record.exc_info: + lines.extend(self.formatException(record.exc_info).split('\n')) + if record.stack_info: + lines.extend(record.stack_info.rstrip().split('\n')) - # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm) - import datetime + # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm) + import datetime - # Use thin attribute for timestamp - dim = escape_codes['thin'] - reset = escape_codes['reset'] - # formatTime doesn't support %f, so use datetime directly - dt = datetime.datetime.fromtimestamp(record.created) - time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] - 
time_formatted = f'{dim}{time_str}{reset}' + # Use thin attribute for timestamp + dim = escape_codes['thin'] + reset = escape_codes['reset'] + # formatTime doesn't support %f, so use datetime directly + dt = datetime.datetime.fromtimestamp(record.created) + time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] + time_formatted = f'{dim}{time_str}{reset}' - # Get the color for this level - log_colors = self.log_colors - level_name = record.levelname - color_name = log_colors.get(level_name, '') - color = escape_codes.get(color_name, '') + # Get the color for this level + log_colors = self.log_colors + level_name = record.levelname + color_name = log_colors.get(level_name, '') + color = escape_codes.get(color_name, '') - level_str = f'{level_name: <8}' + level_str = f'{level_name: <8}' - # Single line - return standard colored format - if len(lines) == 1: - return f'{time_formatted} {color}{level_str}{reset} │ {lines[0]}' + # Single line - return standard colored format + if len(lines) == 1: + return f'{time_formatted} {color}{level_str}{reset} │ {lines[0]}' - # Multi-line - use box format with colors - result = f'{time_formatted} {color}{level_str}{reset} │ {color}┌─ {lines[0]}{reset}' - indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm) - for line in lines[1:-1]: - result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}│ {line}{reset}' - result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}└─ {lines[-1]}{reset}' + # Multi-line - use box format with colors + result = f'{time_formatted} {color}{level_str}{reset} │ {color}┌─{reset} {lines[0]}' + indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm) + for line in lines[1:-1]: + result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}│{reset} {line}' + result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}└─{reset} {lines[-1]}' - return result + return result + + except Exception as e: + # Fallback to simple formatting if anything goes wrong (e.g., encoding issues) + return 
f'{record.created} {record.levelname} - {record.getMessage()} [Formatting Error: {e}]' # SINGLE SOURCE OF TRUTH - immutable to prevent accidental modification @@ -224,12 +235,32 @@ class Logging: - ``disable()`` - Remove all handlers - ``set_colors(log_colors)`` - Customize level colors + Log Levels: + Standard levels plus custom SUCCESS level (between INFO and WARNING): + - DEBUG (10): Detailed debugging information + - INFO (20): General informational messages + - SUCCESS (25): Success messages (custom level) + - WARNING (30): Warning messages + - ERROR (40): Error messages + - CRITICAL (50): Critical error messages + Examples: ```python + import logging + from flixopt.config import CONFIG, SUCCESS_LEVEL + # Console and file logging CONFIG.Logging.enable_console('INFO') CONFIG.Logging.enable_file('DEBUG', 'debug.log') + # Use SUCCESS level with logger.log() + logger = logging.getLogger('flixopt') + CONFIG.Logging.enable_console('SUCCESS') # Shows SUCCESS, WARNING, ERROR, CRITICAL + logger.log(SUCCESS_LEVEL, 'Operation completed successfully!') + + # Or use numeric level directly + logger.log(25, 'Also works with numeric level') + # Customize colors CONFIG.Logging.set_colors( { @@ -267,11 +298,11 @@ class Logging: """ @classmethod - def enable_console(cls, level: str | int = 'INFO', colored: bool = True, stream=None) -> None: + def enable_console(cls, level: str | int = 'INFO', colored: bool = True, stream: TextIO | None = None) -> None: """Enable colored console logging. Args: - level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant) + level: Log level (DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL or numeric level) colored: Use colored output if colorlog is available (default: True) stream: Output stream (default: sys.stdout). Can be sys.stdout or sys.stderr. 
@@ -303,7 +334,10 @@ def enable_console(cls, level: str | int = 'INFO', colored: bool = True, stream= # Convert string level to logging constant if isinstance(level, str): - level = getattr(logging, level.upper()) + if level.upper().strip() == 'SUCCESS': + level = SUCCESS_LEVEL + else: + level = getattr(logging, level.upper()) logger.setLevel(level) @@ -347,14 +381,16 @@ def enable_file( path: str | Path = 'flixopt.log', max_bytes: int = 10 * 1024 * 1024, backup_count: int = 5, + encoding: str = 'utf-8', ) -> None: """Enable file logging with rotation. Removes all existing file handlers! Args: - level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant) + level: Log level (DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL or numeric level) path: Path to log file (default: 'flixopt.log') max_bytes: Maximum file size before rotation in bytes (default: 10MB) backup_count: Number of backup files to keep (default: 5) + encoding: File encoding (default: 'utf-8'). Use 'utf-8' for maximum compatibility. Note: For full control over formatting and handlers, use logging module directly. 
@@ -366,13 +402,19 @@ def enable_file( # With custom rotation CONFIG.Logging.enable_file('DEBUG', 'debug.log', max_bytes=50 * 1024 * 1024, backup_count=10) + + # With explicit encoding + CONFIG.Logging.enable_file('INFO', 'app.log', encoding='utf-8') ``` """ logger = logging.getLogger('flixopt') # Convert string level to logging constant if isinstance(level, str): - level = getattr(logging, level.upper()) + if level.upper().strip() == 'SUCCESS': + level = SUCCESS_LEVEL + else: + level = getattr(logging, level.upper()) logger.setLevel(level) @@ -385,7 +427,7 @@ def enable_file( log_path = Path(path) log_path.parent.mkdir(parents=True, exist_ok=True) - handler = RotatingFileHandler(path, maxBytes=max_bytes, backupCount=backup_count) + handler = RotatingFileHandler(path, maxBytes=max_bytes, backupCount=backup_count, encoding=encoding) handler.setFormatter(MultilineFormatter()) logger.addHandler(handler) @@ -699,6 +741,74 @@ def browser_plotting(cls) -> type[CONFIG]: return cls + @classmethod + def load_from_file(cls, config_file: str | Path) -> type[CONFIG]: + """Load configuration from YAML file and apply it. + + Args: + config_file: Path to the YAML configuration file. + + Returns: + The CONFIG class for method chaining. + + Raises: + FileNotFoundError: If the config file does not exist. + + Examples: + ```python + CONFIG.load_from_file('my_config.yaml') + ``` + + Example YAML file: + ```yaml + config_name: my_project + modeling: + big: 10000000 + epsilon: 0.00001 + solving: + mip_gap: 0.001 + time_limit_seconds: 600 + plotting: + default_engine: matplotlib + default_dpi: 600 + ``` + """ + # Import here to avoid circular import + from . 
import io as fx_io + + config_path = Path(config_file) + if not config_path.exists(): + raise FileNotFoundError(f'Config file not found: {config_file}') + + config_dict = fx_io.load_yaml(config_path) + cls._apply_config_dict(config_dict) + + return cls + + @classmethod + def _apply_config_dict(cls, config_dict: dict) -> None: + """Apply configuration dictionary to class attributes. + + Args: + config_dict: Dictionary containing configuration values. + """ + for key, value in config_dict.items(): + if key == 'modeling' and isinstance(value, dict): + for nested_key, nested_value in value.items(): + if hasattr(cls.Modeling, nested_key): + setattr(cls.Modeling, nested_key, nested_value) + elif key == 'solving' and isinstance(value, dict): + for nested_key, nested_value in value.items(): + if hasattr(cls.Solving, nested_key): + setattr(cls.Solving, nested_key, nested_value) + elif key == 'plotting' and isinstance(value, dict): + for nested_key, nested_value in value.items(): + if hasattr(cls.Plotting, nested_key): + setattr(cls.Plotting, nested_key, nested_value) + elif hasattr(cls, key) and key != 'logging': + # Skip 'logging' as it requires special handling via CONFIG.Logging methods + setattr(cls, key, value) + def change_logging_level(level_name: str | int) -> None: """Change the logging level for the flixopt logger. diff --git a/flixopt/elements.py b/flixopt/elements.py index 0f57c06d5..17730bc98 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -28,13 +28,7 @@ if TYPE_CHECKING: import linopy - from .flow_system import FlowSystem from .types import ( - Bool_PS, - Bool_S, - Bool_TPS, - Effect_PS, - Effect_S, Effect_TPS, Numeric_PS, Numeric_S, @@ -495,7 +489,8 @@ def __init__( self.bus = bus.label_full warnings.warn( f'Bus {bus.label} is passed as a Bus object to {self.label}. This is deprecated and will be removed ' - f'in the future. Add the Bus to the FlowSystem instead and pass its label to the Flow.', + f'in the future. 
Add the Bus to the FlowSystem instead and pass its label to the Flow. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', UserWarning, stacklevel=1, ) @@ -895,8 +890,19 @@ def _do_modeling(self): eq_bus_balance.lhs -= -self.excess_input + self.excess_output - self._model.effects.add_share_to_penalty(self.label_of_element, (self.excess_input * excess_penalty).sum()) - self._model.effects.add_share_to_penalty(self.label_of_element, (self.excess_output * excess_penalty).sum()) + # Add penalty shares as temporal effects (time-dependent) + from .effects import PENALTY_EFFECT_LABEL + + self._model.effects.add_share_to_effects( + name=self.label_of_element, + expressions={PENALTY_EFFECT_LABEL: self.excess_input * excess_penalty}, + target='temporal', + ) + self._model.effects.add_share_to_effects( + name=self.label_of_element, + expressions={PENALTY_EFFECT_LABEL: self.excess_output * excess_penalty}, + target='temporal', + ) def results_structure(self): inputs = [flow.submodel.flow_rate.name for flow in self.element.inputs] diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 63bb7b16d..52c403396 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -8,14 +8,14 @@ import warnings from collections import defaultdict from itertools import chain -from typing import TYPE_CHECKING, Any, Literal, Optional +from typing import TYPE_CHECKING, Any, Literal import numpy as np import pandas as pd import xarray as xr from . 
import io as fx_io -from .config import CONFIG +from .config import CONFIG, DEPRECATION_REMOVAL_VERSION from .core import ( ConversionError, DataConverter, @@ -32,7 +32,7 @@ import pyvis - from .types import Bool_TPS, Effect_TPS, Numeric_PS, Numeric_S, Numeric_TPS, NumericOrBool + from .types import Effect_TPS, Numeric_S, Numeric_TPS, NumericOrBool logger = logging.getLogger('flixopt') @@ -150,7 +150,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): - The `.flows` container is automatically populated from all component inputs and outputs. - Creates an empty registry for components and buses, an empty EffectCollection, and a placeholder for a SystemModel. - The instance starts disconnected (self._connected_and_transformed == False) and will be - connected_and_transformed automatically when trying to solve a calculation. + connected_and_transformed automatically when trying to optimize. """ model: FlowSystemModel | None @@ -211,7 +211,7 @@ def __init__( self.model: FlowSystemModel | None = None self._connected_and_transformed = False - self._used_in_calculation = False + self._used_in_optimization = False self._network_app = None self._flows_cache: ElementContainer[Flow] | None = None @@ -995,7 +995,8 @@ def _connect_network(self): warnings.warn( f'The Bus {flow._bus_object.label_full} was added to the FlowSystem from {flow.label_full}.' f'This is deprecated and will be removed in the future. ' - f'Please pass the Bus.label to the Flow and the Bus to the FlowSystem instead.', + f'Please pass the Bus.label to the Flow and the Bus to the FlowSystem instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=1, ) @@ -1109,7 +1110,7 @@ def all_elements(self) -> dict[str, Element]: "The 'all_elements' property is deprecated. Use dict-like interface instead: " "flow_system['element'], 'element' in flow_system, flow_system.keys(), " 'flow_system.values(), or flow_system.items(). 
' - 'This property will be removed in v4.0.0.', + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1126,7 +1127,7 @@ def coords(self) -> dict[FlowSystemDimensions, pd.Index]: @property def used_in_calculation(self) -> bool: - return self._used_in_calculation + return self._used_in_optimization @property def scenario_weights(self) -> xr.DataArray | None: @@ -1165,7 +1166,8 @@ def scenario_weights(self, value: Numeric_S | None) -> None: @property def weights(self) -> Numeric_S | None: warnings.warn( - 'FlowSystem.weights is deprecated. Use FlowSystem.scenario_weights instead.', + f'FlowSystem.weights is deprecated. Use FlowSystem.scenario_weights instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1180,7 +1182,8 @@ def weights(self, value: Numeric_S) -> None: value: Scenario weights to set """ warnings.warn( - 'Setting FlowSystem.weights is deprecated. Set FlowSystem.scenario_weights instead.', + f'Setting FlowSystem.weights is deprecated. Set FlowSystem.scenario_weights instead. 
' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) diff --git a/flixopt/interface.py b/flixopt/interface.py index 93d4e188b..cfa210f6d 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -6,20 +6,18 @@ from __future__ import annotations import logging -import warnings -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING import numpy as np import pandas as pd import xarray as xr -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION +from .config import CONFIG from .structure import Interface, register_class_for_io if TYPE_CHECKING: # for type checking and preventing circular imports from collections.abc import Iterator - from .flow_system import FlowSystem from .types import Effect_PS, Effect_TPS, Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') @@ -737,7 +735,7 @@ class InvestParameters(Interface): For long-term investments, the cost values should be annualized to the corresponding operation time (annuity). 
- Use equivalent annual cost (capital cost / equipment lifetime) - - Apply appropriate discount rates for present value calculations + - Apply appropriate discount rates for present value optimizations - Account for inflation, escalation, and financing costs Example: €1M equipment with 20-year life → €50k/year fixed cost diff --git a/flixopt/io.py b/flixopt/io.py index 294822b7c..27bc242ff 100644 --- a/flixopt/io.py +++ b/flixopt/io.py @@ -598,8 +598,8 @@ def load_dataset_from_netcdf(path: str | pathlib.Path) -> xr.Dataset: @dataclass -class CalculationResultsPaths: - """Container for all paths related to saving CalculationResults.""" +class ResultsPaths: + """Container for all paths related to saving Results.""" folder: pathlib.Path name: str @@ -628,18 +628,24 @@ def all_paths(self) -> dict[str, pathlib.Path]: 'model_documentation': self.model_documentation, } - def create_folders(self, parents: bool = False) -> None: + def create_folders(self, parents: bool = False, exist_ok: bool = True) -> None: """Ensure the folder exists. + Args: - parents: Whether to create the parent folders if they do not exist. + parents: If True, create parent directories as needed. If False, parent must exist. + exist_ok: If True, do not raise error if folder already exists. If False, raise FileExistsError. + + Raises: + FileNotFoundError: If parents=False and parent directory doesn't exist. + FileExistsError: If exist_ok=False and folder already exists. """ - if not self.folder.exists(): - try: - self.folder.mkdir(parents=parents) - except FileNotFoundError as e: - raise FileNotFoundError( - f'Folder {self.folder} and its parent do not exist. Please create them first.' - ) from e + try: + self.folder.mkdir(parents=parents, exist_ok=exist_ok) + except FileNotFoundError as e: + raise FileNotFoundError( + f'Cannot create folder {self.folder}: parent directory does not exist. ' + f'Use parents=True to create parent directories.' 
+ ) from e def update(self, new_name: str | None = None, new_folder: pathlib.Path | None = None) -> None: """Update name and/or folder and refresh all paths.""" diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index f53a79567..9ca73519e 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -5,14 +5,11 @@ from __future__ import annotations import logging -import warnings from typing import TYPE_CHECKING import numpy as np from .components import LinearConverter -from .config import DEPRECATION_REMOVAL_VERSION -from .core import TimeSeriesData from .structure import register_class_for_io if TYPE_CHECKING: diff --git a/flixopt/modeling.py b/flixopt/modeling.py index 13b4c0e3e..01a2c2410 100644 --- a/flixopt/modeling.py +++ b/flixopt/modeling.py @@ -11,7 +11,7 @@ class ModelingUtilitiesAbstract: - """Utility functions for modeling calculations - leveraging xarray for temporal data""" + """Utility functions for modeling - leveraging xarray for temporal data""" @staticmethod def to_binary( diff --git a/flixopt/network_app.py b/flixopt/network_app.py index d18bc44a8..32b0af2cd 100644 --- a/flixopt/network_app.py +++ b/flixopt/network_app.py @@ -19,6 +19,7 @@ VISUALIZATION_ERROR = str(e) from .components import LinearConverter, Sink, Source, SourceAndSink, Storage +from .config import SUCCESS_LEVEL from .elements import Bus if TYPE_CHECKING: @@ -780,7 +781,7 @@ def find_free_port(start_port=8050, end_port=8100): server_thread = threading.Thread(target=server.serve_forever, daemon=True) server_thread.start() - logger.success(f'Network visualization started on http://127.0.0.1:{port}/') + logger.log(SUCCESS_LEVEL, f'Network visualization started on http://127.0.0.1:{port}/') # Store server reference for cleanup app.server_instance = server diff --git a/flixopt/optimization.py b/flixopt/optimization.py new file mode 100644 index 000000000..e537029d7 --- /dev/null +++ b/flixopt/optimization.py @@ -0,0 +1,989 @@ +""" +This 
module contains the Optimization functionality for the flixopt framework. +It is used to optimize a FlowSystemModel for a given FlowSystem through a solver. +There are three different Optimization types: + 1. Optimization: Optimizes the FlowSystemModel for the full FlowSystem + 2. ClusteredOptimization: Optimizes the FlowSystemModel for the full FlowSystem, but clusters the TimeSeriesData. + This simplifies the mathematical model and usually speeds up the solving process. + 3. SegmentedOptimization: Solves a FlowSystemModel for each individual Segment of the FlowSystem. +""" + +from __future__ import annotations + +import logging +import math +import pathlib +import sys +import timeit +import warnings +from collections import Counter +from typing import TYPE_CHECKING, Annotated, Any, Protocol, runtime_checkable + +import numpy as np +from tqdm import tqdm + +from . import io as fx_io +from .clustering import Clustering, ClusteringModel, ClusteringParameters +from .components import Storage +from .config import CONFIG, SUCCESS_LEVEL +from .core import DEPRECATION_REMOVAL_VERSION, DataConverter, TimeSeriesData, drop_constant_arrays +from .effects import PENALTY_EFFECT_LABEL +from .features import InvestmentModel +from .flow_system import FlowSystem +from .results import Results, SegmentedResults + +if TYPE_CHECKING: + import pandas as pd + import xarray as xr + + from .elements import Component + from .solvers import _Solver + from .structure import FlowSystemModel + +logger = logging.getLogger('flixopt') + + +@runtime_checkable +class OptimizationProtocol(Protocol): + """ + Protocol defining the interface that all optimization types should implement. + + This protocol ensures type consistency across different optimization approaches + without forcing them into an artificial inheritance hierarchy. 
+ + Attributes: + name: Name of the optimization + flow_system: FlowSystem being optimized + folder: Directory where results are saved + results: Results object after solving + durations: Dictionary tracking time spent in different phases + """ + + name: str + flow_system: FlowSystem + folder: pathlib.Path + results: Results | SegmentedResults | None + durations: dict[str, float] + + @property + def modeled(self) -> bool: + """Returns True if the optimization has been modeled.""" + ... + + @property + def main_results(self) -> dict[str, int | float | dict]: + """Returns main results including objective, effects, and investment decisions.""" + ... + + @property + def summary(self) -> dict: + """Returns summary information about the optimization.""" + ... + + +def _initialize_optimization_common( + obj: Any, + name: str, + flow_system: FlowSystem, + active_timesteps: pd.DatetimeIndex | None = None, + folder: pathlib.Path | None = None, + normalize_weights: bool = True, +) -> None: + """ + Shared initialization logic for all optimization types. + + This helper function encapsulates common initialization code to avoid duplication + across Optimization, ClusteredOptimization, and SegmentedOptimization. + + Args: + obj: The optimization object being initialized + name: Name of the optimization + flow_system: FlowSystem to optimize + active_timesteps: DEPRECATED. Use flow_system.sel(time=...) instead + folder: Directory for saving results + normalize_weights: Whether to normalize scenario weights + """ + obj.name = name + + if flow_system.used_in_calculation: + logger.warning( + f'This FlowSystem is already used in an optimization:\n{flow_system}\n' + f'Creating a copy of the FlowSystem for Optimization "{obj.name}".' + ) + flow_system = flow_system.copy() + + if active_timesteps is not None: + warnings.warn( + f"The 'active_timesteps' parameter is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
" + 'Use flow_system.sel(time=timesteps) or flow_system.isel(time=indices) before passing ' + 'the FlowSystem to the Optimization instead.', + DeprecationWarning, + stacklevel=2, + ) + flow_system = flow_system.sel(time=active_timesteps) + + obj._active_timesteps = active_timesteps # deprecated + obj.normalize_weights = normalize_weights + + flow_system._used_in_optimization = True + + obj.flow_system = flow_system + obj.model = None + + obj.durations = {'modeling': 0.0, 'solving': 0.0, 'saving': 0.0} + obj.folder = pathlib.Path.cwd() / 'results' if folder is None else pathlib.Path(folder) + obj.results = None + + if obj.folder.exists() and not obj.folder.is_dir(): + raise NotADirectoryError(f'Path {obj.folder} exists and is not a directory.') + # Create folder and any necessary parent directories + obj.folder.mkdir(parents=True, exist_ok=True) + + +class Optimization: + """ + Standard optimization that solves the complete problem using all time steps. + + This is the default optimization approach that considers every time step, + providing the most accurate but computationally intensive solution. + + For large problems, consider using ClusteredOptimization (time aggregation) + or SegmentedOptimization (temporal decomposition) instead. + + Args: + name: name of optimization + flow_system: flow_system which should be optimized + folder: folder where results should be saved. If None, then the current working directory is used. + normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving. + active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. 
+ + Examples: + Basic usage: + ```python + from flixopt import Optimization + + opt = Optimization(name='my_optimization', flow_system=energy_system, folder=Path('results')) + opt.do_modeling() + opt.solve(solver=gurobi) + results = opt.results + ``` + """ + + # Attributes set by __init__ / _initialize_optimization_common + name: str + flow_system: FlowSystem + folder: pathlib.Path + results: Results | None + durations: dict[str, float] + model: FlowSystemModel | None + normalize_weights: bool + + def __init__( + self, + name: str, + flow_system: FlowSystem, + active_timesteps: Annotated[ + pd.DatetimeIndex | None, + 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', + ] = None, + folder: pathlib.Path | None = None, + normalize_weights: bool = True, + ): + _initialize_optimization_common( + self, + name=name, + flow_system=flow_system, + active_timesteps=active_timesteps, + folder=folder, + normalize_weights=normalize_weights, + ) + + def do_modeling(self) -> Optimization: + t_start = timeit.default_timer() + self.flow_system.connect_and_transform() + + self.model = self.flow_system.create_model(self.normalize_weights) + self.model.do_modeling() + + self.durations['modeling'] = round(timeit.default_timer() - t_start, 2) + return self + + def fix_sizes(self, ds: xr.Dataset | None = None, decimal_rounding: int | None = 5) -> Optimization: + """Fix the sizes of the optimizations to specified values. + + Args: + ds: The dataset that contains the variable names mapped to their sizes. If None, the dataset is loaded from the results. + decimal_rounding: The number of decimal places to round the sizes to. If no rounding is applied, numerical errors might lead to infeasibility. + """ + if not self.modeled: + raise RuntimeError('Model was not created. 
Call do_modeling() first.') + + if ds is None: + if self.results is None: + raise RuntimeError('No dataset provided and no results available to load sizes from.') + ds = self.results.solution + + if decimal_rounding is not None: + ds = ds.round(decimal_rounding) + + for name, da in ds.data_vars.items(): + if '|size' not in name: + continue + if name not in self.model.variables: + logger.debug(f'Variable {name} not found in calculation model. Skipping.') + continue + + con = self.model.add_constraints( + self.model[name] == da, + name=f'{name}-fixed', + ) + logger.debug(f'Fixed "{name}":\n{con}') + + return self + + def solve( + self, solver: _Solver, log_file: pathlib.Path | None = None, log_main_results: bool | None = None + ) -> Optimization: + # Auto-call do_modeling() if not already done + if not self.modeled: + logger.info('Model not yet created. Calling do_modeling() automatically.') + self.do_modeling() + + t_start = timeit.default_timer() + + self.model.solve( + log_fn=pathlib.Path(log_file) if log_file is not None else self.folder / f'{self.name}.log', + solver_name=solver.name, + **solver.options, + ) + self.durations['solving'] = round(timeit.default_timer() - t_start, 2) + logger.log(SUCCESS_LEVEL, f'Model solved with {solver.name} in {self.durations["solving"]:.2f} seconds.') + logger.info(f'Model status after solve: {self.model.status}') + + if self.model.status == 'warning': + # Save the model and the flow_system to file in case of infeasibility + self.folder.mkdir(parents=True, exist_ok=True) + paths = fx_io.ResultsPaths(self.folder, self.name) + from .io import document_linopy_model + + document_linopy_model(self.model, paths.model_documentation) + self.flow_system.to_netcdf(paths.flow_system) + raise RuntimeError( + f'Model was infeasible. Please check {paths.model_documentation=} and {paths.flow_system=} for more information.' 
+ ) + + # Log the formatted output + should_log = log_main_results if log_main_results is not None else CONFIG.Solving.log_main_results + if should_log and logger.isEnabledFor(logging.INFO): + logger.log( + SUCCESS_LEVEL, + f'{" Main Results ":#^80}\n' + fx_io.format_yaml_string(self.main_results, compact_numeric_lists=True), + ) + + self.results = Results.from_optimization(self) + + return self + + @property + def main_results(self) -> dict[str, int | float | dict]: + if self.model is None: + raise RuntimeError('Optimization has not been solved yet. Call solve() before accessing main_results.') + + try: + penalty_effect = self.flow_system.effects.penalty_effect + penalty_section = { + 'temporal': penalty_effect.submodel.temporal.total.solution.values, + 'periodic': penalty_effect.submodel.periodic.total.solution.values, + 'total': penalty_effect.submodel.total.solution.values, + } + except KeyError: + penalty_section = {'temporal': 0.0, 'periodic': 0.0, 'total': 0.0} + + main_results = { + 'Objective': self.model.objective.value, + 'Penalty': penalty_section, + 'Effects': { + f'{effect.label} [{effect.unit}]': { + 'temporal': effect.submodel.temporal.total.solution.values, + 'periodic': effect.submodel.periodic.total.solution.values, + 'total': effect.submodel.total.solution.values, + } + for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.label_full.upper()) + if effect.label_full != PENALTY_EFFECT_LABEL + }, + 'Invest-Decisions': { + 'Invested': { + model.label_of_element: model.size.solution + for component in self.flow_system.components.values() + for model in component.submodel.all_submodels + if isinstance(model, InvestmentModel) + and model.size.solution.max().item() >= CONFIG.Modeling.epsilon + }, + 'Not invested': { + model.label_of_element: model.size.solution + for component in self.flow_system.components.values() + for model in component.submodel.all_submodels + if isinstance(model, InvestmentModel) and 
model.size.solution.max().item() < CONFIG.Modeling.epsilon + }, + }, + 'Buses with excess': [ + { + bus.label_full: { + 'input': bus.submodel.excess_input.solution.sum('time'), + 'output': bus.submodel.excess_output.solution.sum('time'), + } + } + for bus in self.flow_system.buses.values() + if bus.with_excess + and ( + bus.submodel.excess_input.solution.sum().item() > 1e-3 + or bus.submodel.excess_output.solution.sum().item() > 1e-3 + ) + ], + } + + return fx_io.round_nested_floats(main_results) + + @property + def summary(self): + if self.model is None: + raise RuntimeError('Optimization has not been solved yet. Call solve() before accessing summary.') + + return { + 'Name': self.name, + 'Number of timesteps': len(self.flow_system.timesteps), + 'Optimization Type': self.__class__.__name__, + 'Constraints': self.model.constraints.ncons, + 'Variables': self.model.variables.nvars, + 'Main Results': self.main_results, + 'Durations': self.durations, + 'Config': CONFIG.to_dict(), + } + + @property + def active_timesteps(self) -> pd.DatetimeIndex | None: + warnings.warn( + f'active_timesteps is deprecated. Use flow_system.sel(time=...) or flow_system.isel(time=...) instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + DeprecationWarning, + stacklevel=2, + ) + return self._active_timesteps + + @property + def modeled(self) -> bool: + return True if self.model is not None else False + + +class ClusteredOptimization(Optimization): + """ + ClusteredOptimization reduces computational complexity by clustering time series into typical periods. + + This optimization approach clusters time series data using techniques from the tsam library to identify + representative time periods, significantly reducing computation time while maintaining solution accuracy. + + Note: + The quality of the solution depends on the choice of aggregation parameters. + The optimal parameters depend on the specific problem and the characteristics of the time series data. 
+ For more information, refer to the [tsam documentation](https://tsam.readthedocs.io/en/latest/). + + Args: + name: Name of the optimization + flow_system: FlowSystem to be optimized + clustering_parameters: Parameters for clustering. See ClusteringParameters class documentation + components_to_clusterize: list of Components to perform aggregation on. If None, all components are aggregated. + This equalizes variables in the components according to the typical periods computed in the aggregation + active_timesteps: DatetimeIndex of timesteps to use for optimization. If None, all timesteps are used + folder: Folder where results should be saved. If None, current working directory is used + normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving + + Attributes: + clustering (Clustering | None): Contains the clustered time series data + clustering_model (ClusteringModel | None): Contains Variables and Constraints that equalize clusters of the time series data + """ + + def __init__( + self, + name: str, + flow_system: FlowSystem, + clustering_parameters: ClusteringParameters, + components_to_clusterize: list[Component] | None = None, + active_timesteps: Annotated[ + pd.DatetimeIndex | None, + 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', + ] = None, + folder: pathlib.Path | None = None, + normalize_weights: bool = True, + ): + if flow_system.scenarios is not None: + raise ValueError('Clustering is not supported for scenarios yet. Please use Optimization instead.') + if flow_system.periods is not None: + raise ValueError('Clustering is not supported for periods yet. 
Please use Optimization instead.') + super().__init__( + name=name, + flow_system=flow_system, + active_timesteps=active_timesteps, + folder=folder, + normalize_weights=normalize_weights, + ) + self.clustering_parameters = clustering_parameters + self.components_to_clusterize = components_to_clusterize + self.clustering: Clustering | None = None + self.clustering_model: ClusteringModel | None = None + + def do_modeling(self) -> ClusteredOptimization: + t_start = timeit.default_timer() + self.flow_system.connect_and_transform() + self._perform_clustering() + + # Model the System + self.model = self.flow_system.create_model(self.normalize_weights) + self.model.do_modeling() + # Add Clustering Submodel after modeling the rest + self.clustering_model = ClusteringModel( + self.model, self.clustering_parameters, self.flow_system, self.clustering, self.components_to_clusterize + ) + self.clustering_model.do_modeling() + self.durations['modeling'] = round(timeit.default_timer() - t_start, 2) + return self + + def _perform_clustering(self): + from .clustering import Clustering + + t_start_agg = timeit.default_timer() + + # Validation + dt_min = float(self.flow_system.hours_per_timestep.min().item()) + dt_max = float(self.flow_system.hours_per_timestep.max().item()) + if not dt_min == dt_max: + raise ValueError( + f'Clustering failed due to inconsistent time step sizes:delta_t varies from {dt_min} to {dt_max} hours.' + ) + ratio = self.clustering_parameters.hours_per_period / dt_max + if not np.isclose(ratio, round(ratio), atol=1e-9): + raise ValueError( + f'The selected {self.clustering_parameters.hours_per_period=} does not match the time ' + f'step size of {dt_max} hours. It must be an integer multiple of {dt_max} hours.' 
+ ) + + logger.info(f'{"":#^80}') + logger.info(f'{" Clustering TimeSeries Data ":#^80}') + + ds = self.flow_system.to_dataset() + + temporaly_changing_ds = drop_constant_arrays(ds, dim='time') + + # Clustering - creation of clustered timeseries: + self.clustering = Clustering( + original_data=temporaly_changing_ds.to_dataframe(), + hours_per_time_step=float(dt_min), + hours_per_period=self.clustering_parameters.hours_per_period, + nr_of_periods=self.clustering_parameters.nr_of_periods, + weights=self.calculate_clustering_weights(temporaly_changing_ds), + time_series_for_high_peaks=self.clustering_parameters.labels_for_high_peaks, + time_series_for_low_peaks=self.clustering_parameters.labels_for_low_peaks, + ) + + self.clustering.cluster() + self.clustering.plot(show=CONFIG.Plotting.default_show, save=self.folder / 'clustering.html') + if self.clustering_parameters.aggregate_data_and_fix_non_binary_vars: + ds = self.flow_system.to_dataset() + for name, series in self.clustering.aggregated_data.items(): + da = ( + DataConverter.to_dataarray(series, self.flow_system.coords) + .rename(name) + .assign_attrs(ds[name].attrs) + ) + if TimeSeriesData.is_timeseries_data(da): + da = TimeSeriesData.from_dataarray(da) + + ds[name] = da + + self.flow_system = FlowSystem.from_dataset(ds) + self.flow_system.connect_and_transform() + self.durations['clustering'] = round(timeit.default_timer() - t_start_agg, 2) + + def _perform_aggregation(self): + """Deprecated: Use _perform_clustering instead.""" + warnings.warn( + f'_perform_aggregation is deprecated, use _perform_clustering instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + DeprecationWarning, + stacklevel=2, + ) + return self._perform_clustering() + + @classmethod + def calculate_clustering_weights(cls, ds: xr.Dataset) -> dict[str, float]: + """Calculate weights for all datavars in the dataset. 
Weights are pulled from the attrs of the datavars.""" + + # Support both old and new attr names for backward compatibility + groups = [ + da.attrs.get('clustering_group', da.attrs.get('aggregation_group')) + for da in ds.data_vars.values() + if 'clustering_group' in da.attrs or 'aggregation_group' in da.attrs + ] + group_counts = Counter(groups) + + # Calculate weight for each group (1/count) + group_weights = {group: 1 / count for group, count in group_counts.items()} + + weights = {} + for name, da in ds.data_vars.items(): + # Try both old and new attr names + clustering_group = da.attrs.get('clustering_group', da.attrs.get('aggregation_group')) + group_weight = group_weights.get(clustering_group) + if group_weight is not None: + weights[name] = group_weight + else: + # Try both old and new attr names for weight + weights[name] = da.attrs.get('clustering_weight', da.attrs.get('aggregation_weight', 1)) + + if np.all(np.isclose(list(weights.values()), 1, atol=1e-6)): + logger.info('All Clustering weights were set to 1') + + return weights + + @classmethod + def calculate_aggregation_weights(cls, ds: xr.Dataset) -> dict[str, float]: + """Deprecated: Use calculate_clustering_weights instead.""" + warnings.warn( + f'calculate_aggregation_weights is deprecated, use calculate_clustering_weights instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + DeprecationWarning, + stacklevel=2, + ) + return cls.calculate_clustering_weights(ds) + + +class SegmentedOptimization: + """Solve large optimization problems by dividing time horizon into (overlapping) segments. + + This class addresses memory and computational limitations of large-scale optimization + problems by decomposing the time horizon into smaller overlapping segments that are + solved sequentially. Each segment uses final values from the previous segment as + initial conditions, ensuring dynamic continuity across the solution. 
+ + Key Concepts: + **Temporal Decomposition**: Divides long time horizons into manageable segments + **Overlapping Windows**: Segments share timesteps to improve storage dynamics + **Value Transfer**: Final states of one segment become initial states of the next + **Sequential Solving**: Each segment solved independently but with coupling + + Limitations and Constraints: + **Investment Parameters**: InvestParameters are not supported in segmented optimizations + as investment decisions must be made for the entire time horizon, not per segment. + + **Global Constraints**: Time-horizon-wide constraints (flow_hours_total_min/max, + load_factor_min/max) may produce suboptimal results as they cannot be enforced + globally across segments. + + **Storage Dynamics**: While overlap helps, storage optimization may be suboptimal + compared to full-horizon solutions due to limited foresight in each segment. + + Args: + name: Unique identifier for the calculation, used in result files and logging. + flow_system: The FlowSystem to optimize, containing all components, flows, and buses. + timesteps_per_segment: Number of timesteps in each segment (excluding overlap). + Must be > 2 to avoid internal side effects. Larger values provide better + optimization at the cost of memory and computation time. + overlap_timesteps: Number of additional timesteps added to each segment. + Improves storage optimization by providing lookahead. Higher values + improve solution quality but increase computational cost. + nr_of_previous_values: Number of previous timestep values to transfer between + segments for initialization. Typically 1 is sufficient. + folder: Directory for saving results. Defaults to current working directory + 'results'. 
+ + Examples: + Annual optimization with monthly segments: + + ```python + # 8760 hours annual data with monthly segments (730 hours) and 48-hour overlap + segmented_calc = SegmentedOptimization( + name='annual_energy_system', + flow_system=energy_system, + timesteps_per_segment=730, # ~1 month + overlap_timesteps=48, # 2 days overlap + folder=Path('results/segmented'), + ) + segmented_calc.do_modeling_and_solve(solver='gurobi') + ``` + + Weekly optimization with daily overlap: + + ```python + # Weekly segments for detailed operational planning + weekly_calc = SegmentedOptimization( + name='weekly_operations', + flow_system=industrial_system, + timesteps_per_segment=168, # 1 week (hourly data) + overlap_timesteps=24, # 1 day overlap + nr_of_previous_values=1, + ) + ``` + + Large-scale system with minimal overlap: + + ```python + # Large system with minimal overlap for computational efficiency + large_calc = SegmentedOptimization( + name='large_scale_grid', + flow_system=grid_system, + timesteps_per_segment=100, # Shorter segments + overlap_timesteps=5, # Minimal overlap + ) + ``` + + Design Considerations: + **Segment Size**: Balance between solution quality and computational efficiency. + Larger segments provide better optimization but require more memory and time. + + **Overlap Duration**: More overlap improves storage dynamics and reduces + end-effects but increases computational cost. Typically 5-10% of segment length. + + **Storage Systems**: Systems with large storage components benefit from longer + overlaps to capture charge/discharge cycles effectively. + + **Investment Decisions**: Use Optimization for problems requiring investment + optimization, as SegmentedOptimization cannot handle investment parameters. 
+ + Common Use Cases: + - **Annual Planning**: Long-term planning with seasonal variations + - **Large Networks**: Spatially or temporally large energy systems + - **Memory-Limited Systems**: When full optimization exceeds available memory + - **Operational Planning**: Detailed short-term optimization with limited foresight + - **Sensitivity Analysis**: Quick approximate solutions for parameter studies + + Performance Tips: + - Start with Optimization and use this class if memory issues occur + - Use longer overlaps for systems with significant storage + - Monitor solution quality at segment boundaries for discontinuities + + Warning: + The evaluation of the solution is a bit more complex than Optimization or ClusteredOptimization + due to the overlapping individual solutions. + + """ + + # Attributes set by __init__ / _initialize_optimization_common + name: str + flow_system: FlowSystem + folder: pathlib.Path + results: SegmentedResults | None + durations: dict[str, float] + model: None # SegmentedOptimization doesn't use a single model + normalize_weights: bool + _active_timesteps: pd.DatetimeIndex | None + + def __init__( + self, + name: str, + flow_system: FlowSystem, + timesteps_per_segment: int, + overlap_timesteps: int, + nr_of_previous_values: int = 1, + folder: pathlib.Path | None = None, + ): + _initialize_optimization_common( + self, + name=name, + flow_system=flow_system, + active_timesteps=None, + folder=folder, + ) + self.timesteps_per_segment = timesteps_per_segment + self.overlap_timesteps = overlap_timesteps + self.nr_of_previous_values = nr_of_previous_values + + # Validate overlap_timesteps early + if self.overlap_timesteps < 0: + raise ValueError('overlap_timesteps must be non-negative.') + + # Validate timesteps_per_segment early (before using in arithmetic) + if self.timesteps_per_segment <= 2: + raise ValueError('timesteps_per_segment must be greater than 2 due to internal side effects.') + + # Validate nr_of_previous_values + if 
self.nr_of_previous_values < 0: + raise ValueError('nr_of_previous_values must be non-negative.') + if self.nr_of_previous_values > self.timesteps_per_segment: + raise ValueError('nr_of_previous_values cannot exceed timesteps_per_segment.') + + self.sub_optimizations: list[Optimization] = [] + + self.segment_names = [ + f'Segment_{i + 1}' for i in range(math.ceil(len(self.all_timesteps) / self.timesteps_per_segment)) + ] + self._timesteps_per_segment = self._calculate_timesteps_per_segment() + + if self.timesteps_per_segment_with_overlap > len(self.all_timesteps): + raise ValueError( + f'timesteps_per_segment_with_overlap ({self.timesteps_per_segment_with_overlap}) ' + f'cannot exceed total timesteps ({len(self.all_timesteps)}).' + ) + + self.flow_system._connect_network() # Connect network to ensure that all Flows know their Component + # Storing all original start values + self._original_start_values = { + **{flow.label_full: flow.previous_flow_rate for flow in self.flow_system.flows.values()}, + **{ + comp.label_full: comp.initial_charge_state + for comp in self.flow_system.components.values() + if isinstance(comp, Storage) + }, + } + self._transfered_start_values: list[dict[str, Any]] = [] + + def _create_sub_optimizations(self): + for i, (segment_name, timesteps_of_segment) in enumerate( + zip(self.segment_names, self._timesteps_per_segment, strict=True) + ): + calc = Optimization(f'{self.name}-{segment_name}', self.flow_system.sel(time=timesteps_of_segment)) + calc.flow_system._connect_network() # Connect to have Correct names of Flows! 
+ + self.sub_optimizations.append(calc) + logger.info( + f'{segment_name} [{i + 1:>2}/{len(self.segment_names):<2}] ' + f'({timesteps_of_segment[0]} -> {timesteps_of_segment[-1]}):' + ) + + def _solve_single_segment( + self, + i: int, + optimization: Optimization, + solver: _Solver, + log_file: pathlib.Path | None, + log_main_results: bool, + suppress_output: bool, + ) -> None: + """Solve a single segment optimization.""" + if i > 0 and self.nr_of_previous_values > 0: + self._transfer_start_values(i) + + optimization.do_modeling() + + # Check for unsupported Investments, but only in first run + if i == 0: + invest_elements = [ + model.label_full + for component in optimization.flow_system.components.values() + for model in component.submodel.all_submodels + if isinstance(model, InvestmentModel) + ] + if invest_elements: + raise ValueError( + f'Investments are not supported in SegmentedOptimization. ' + f'Found InvestmentModels: {invest_elements}. ' + f'Please use Optimization instead for problems with investments.' + ) + + log_path = pathlib.Path(log_file) if log_file is not None else self.folder / f'{self.name}.log' + + if suppress_output: + with fx_io.suppress_output(): + optimization.solve(solver, log_file=log_path, log_main_results=log_main_results) + else: + optimization.solve(solver, log_file=log_path, log_main_results=log_main_results) + + def do_modeling_and_solve( + self, + solver: _Solver, + log_file: pathlib.Path | None = None, + log_main_results: bool = False, + show_individual_solves: bool = False, + ) -> SegmentedOptimization: + """Model and solve all segments of the segmented optimization. + + This method creates sub-optimizations for each time segment, then iteratively + models and solves each segment. It supports two output modes: a progress bar + for compact output, or detailed individual solve information. + + Args: + solver: The solver instance to use for optimization (e.g., Gurobi, HiGHS). + log_file: Optional path to the solver log file. 
If None, defaults to + folder/name.log. + log_main_results: Whether to log main results (objective, effects, etc.) + after each segment solve. Defaults to False. + show_individual_solves: If True, shows detailed output for each segment + solve with logger messages. If False (default), shows a compact progress + bar with suppressed solver output for cleaner display. + + Returns: + Self, for method chaining. + + Note: + The method automatically transfers all start values between segments to ensure + continuity of storage states and flow rates across segment boundaries. + """ + logger.info(f'{"":#^80}') + logger.info(f'{" Segmented Solving ":#^80}') + self._create_sub_optimizations() + + if show_individual_solves: + # Path 1: Show individual solves with detailed output + for i, optimization in enumerate(self.sub_optimizations): + logger.info( + f'Solving segment {i + 1}/{len(self.sub_optimizations)}: ' + f'{optimization.flow_system.timesteps[0]} -> {optimization.flow_system.timesteps[-1]}' + ) + self._solve_single_segment(i, optimization, solver, log_file, log_main_results, suppress_output=False) + else: + # Path 2: Show only progress bar with suppressed output + progress_bar = tqdm( + enumerate(self.sub_optimizations), + total=len(self.sub_optimizations), + desc='Solving segments', + unit='segment', + file=sys.stdout, + disable=not CONFIG.Solving.log_to_console, + ) + + try: + for i, optimization in progress_bar: + progress_bar.set_description( + f'Solving ({optimization.flow_system.timesteps[0]} -> {optimization.flow_system.timesteps[-1]})' + ) + self._solve_single_segment( + i, optimization, solver, log_file, log_main_results, suppress_output=True + ) + finally: + progress_bar.close() + + for calc in self.sub_optimizations: + for key, value in calc.durations.items(): + self.durations[key] += value + + logger.log(SUCCESS_LEVEL, f'Model solved with {solver.name} in {self.durations["solving"]:.2f} seconds.') + + self.results = SegmentedResults.from_optimization(self) 
+ + return self + + def _transfer_start_values(self, i: int): + """ + This function gets the last values of the previous solved segment and + inserts them as start values for the next segment + """ + timesteps_of_prior_segment = self.sub_optimizations[i - 1].flow_system.timesteps_extra + + start = self.sub_optimizations[i].flow_system.timesteps[0] + start_previous_values = timesteps_of_prior_segment[self.timesteps_per_segment - self.nr_of_previous_values] + end_previous_values = timesteps_of_prior_segment[self.timesteps_per_segment - 1] + + logger.debug( + f'Start of next segment: {start}. Indices of previous values: {start_previous_values} -> {end_previous_values}' + ) + current_flow_system = self.sub_optimizations[i - 1].flow_system + next_flow_system = self.sub_optimizations[i].flow_system + + start_values_of_this_segment = {} + + for current_flow in current_flow_system.flows.values(): + next_flow = next_flow_system.flows[current_flow.label_full] + next_flow.previous_flow_rate = current_flow.submodel.flow_rate.solution.sel( + time=slice(start_previous_values, end_previous_values) + ).values + start_values_of_this_segment[current_flow.label_full] = next_flow.previous_flow_rate + + for current_comp in current_flow_system.components.values(): + next_comp = next_flow_system.components[current_comp.label_full] + if isinstance(next_comp, Storage): + next_comp.initial_charge_state = current_comp.submodel.charge_state.solution.sel(time=start).item() + start_values_of_this_segment[current_comp.label_full] = next_comp.initial_charge_state + + self._transfered_start_values.append(start_values_of_this_segment) + + def _calculate_timesteps_per_segment(self) -> list[pd.DatetimeIndex]: + timesteps_per_segment = [] + for i, _ in enumerate(self.segment_names): + start = self.timesteps_per_segment * i + end = min(start + self.timesteps_per_segment_with_overlap, len(self.all_timesteps)) + timesteps_per_segment.append(self.all_timesteps[start:end]) + return timesteps_per_segment + 
+ @property + def timesteps_per_segment_with_overlap(self): + return self.timesteps_per_segment + self.overlap_timesteps + + @property + def start_values_of_segments(self) -> list[dict[str, Any]]: + """Gives an overview of the start values of all Segments""" + return [{name: value for name, value in self._original_start_values.items()}] + [ + start_values for start_values in self._transfered_start_values + ] + + @property + def all_timesteps(self) -> pd.DatetimeIndex: + return self.flow_system.timesteps + + @property + def modeled(self) -> bool: + """Returns True if all segments have been modeled.""" + if len(self.sub_optimizations) == 0: + return False + return all(calc.modeled for calc in self.sub_optimizations) + + @property + def main_results(self) -> dict[str, int | float | dict]: + """Aggregated main results from all segments. + + Note: + For SegmentedOptimization, results are aggregated from SegmentedResults + which handles the overlapping segments properly. Individual segment results + should not be summed directly as they contain overlapping timesteps. + + The objective value shown is the sum of all segment objectives and includes + double-counting from overlapping regions. It does not represent a true + full-horizon objective value. + """ + if self.results is None: + raise RuntimeError( + 'SegmentedOptimization has not been solved yet. ' + 'Call do_modeling_and_solve() first to access main_results.' 
+ ) + + # Use SegmentedResults to get the proper aggregated solution + return { + 'Note': 'SegmentedOptimization results are aggregated via SegmentedResults', + 'Number of segments': len(self.sub_optimizations), + 'Total timesteps': len(self.all_timesteps), + 'Objective (sum of segments, includes overlaps)': sum( + calc.model.objective.value for calc in self.sub_optimizations if calc.modeled + ), + } + + @property + def summary(self): + """Summary of the segmented optimization with aggregated information from all segments.""" + if len(self.sub_optimizations) == 0: + raise RuntimeError( + 'SegmentedOptimization has no segments yet. Call do_modeling_and_solve() first to access summary.' + ) + + # Aggregate constraints and variables from all segments + total_constraints = sum(calc.model.constraints.ncons for calc in self.sub_optimizations if calc.modeled) + total_variables = sum(calc.model.variables.nvars for calc in self.sub_optimizations if calc.modeled) + + return { + 'Name': self.name, + 'Number of timesteps': len(self.flow_system.timesteps), + 'Optimization Type': self.__class__.__name__, + 'Number of segments': len(self.sub_optimizations), + 'Timesteps per segment': self.timesteps_per_segment, + 'Overlap timesteps': self.overlap_timesteps, + 'Constraints (total across segments)': total_constraints, + 'Variables (total across segments)': total_variables, + 'Main Results': self.main_results if self.results else 'Not yet solved', + 'Durations': self.durations, + 'Config': CONFIG.to_dict(), + } + + @property + def active_timesteps(self) -> pd.DatetimeIndex | None: + warnings.warn( + f'active_timesteps is deprecated. Use flow_system.sel(time=...) or flow_system.isel(time=...) instead. 
' + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + DeprecationWarning, + stacklevel=2, + ) + return self._active_timesteps diff --git a/flixopt/plotting.py b/flixopt/plotting.py index 94959ecb5..0a8dfbc9b 100644 --- a/flixopt/plotting.py +++ b/flixopt/plotting.py @@ -25,9 +25,7 @@ from __future__ import annotations -import itertools import logging -import os import pathlib from typing import TYPE_CHECKING, Any, Literal diff --git a/flixopt/results.py b/flixopt/results.py index ccc36952f..6b9a1c580 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -15,17 +15,17 @@ from . import io as fx_io from . import plotting from .color_processing import process_colors -from .config import CONFIG +from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL from .flow_system import FlowSystem -from .structure import CompositeContainerMixin, ElementContainer, ResultsContainer +from .structure import CompositeContainerMixin, ResultsContainer if TYPE_CHECKING: import matplotlib.pyplot as plt import plotly import pyvis - from .calculation import Calculation, SegmentedCalculation from .core import FlowSystemDimensions + from .optimization import Optimization, SegmentedOptimization logger = logging.getLogger('flixopt') @@ -53,8 +53,8 @@ class _FlowSystemRestorationError(Exception): pass -class CalculationResults(CompositeContainerMixin['ComponentResults | BusResults | EffectResults | FlowResults']): - """Comprehensive container for optimization calculation results and analysis tools. +class Results(CompositeContainerMixin['ComponentResults | BusResults | EffectResults | FlowResults']): + """Comprehensive container for optimization results and analysis tools. This class provides unified access to all optimization results including flow rates, component states, bus balances, and system effects. 
It offers powerful analysis @@ -73,27 +73,27 @@ class CalculationResults(CompositeContainerMixin['ComponentResults | BusResults - **Buses**: Network node balances and energy flows - **Effects**: System-wide impacts (costs, emissions, resource consumption) - **Solution**: Raw optimization variables and their values - - **Metadata**: Calculation parameters, timing, and system configuration + - **Metadata**: Optimization parameters, timing, and system configuration Attributes: solution: Dataset containing all optimization variable solutions flow_system_data: Dataset with complete system configuration and parameters. Restore the used FlowSystem for further analysis. - summary: Calculation metadata including solver status, timing, and statistics - name: Unique identifier for this calculation + summary: Optimization metadata including solver status, timing, and statistics + name: Unique identifier for this optimization model: Original linopy optimization model (if available) folder: Directory path for result storage and loading components: Dictionary mapping component labels to ComponentResults objects buses: Dictionary mapping bus labels to BusResults objects effects: Dictionary mapping effect names to EffectResults objects timesteps_extra: Extended time index including boundary conditions - hours_per_timestep: Duration of each timestep for proper energy calculations + hours_per_timestep: Duration of each timestep for proper energy optimizations Examples: Load and analyze saved results: ```python # Load results from file - results = CalculationResults.from_file('results', 'annual_optimization') + results = Results.from_file('results', 'annual_optimization') # Access specific component results boiler_results = results['Boiler_01'] @@ -140,7 +140,7 @@ class CalculationResults(CompositeContainerMixin['ComponentResults | BusResults ``` Design Patterns: - **Factory Methods**: Use `from_file()` and `from_calculation()` for creation or access directly from 
`Calculation.results` + **Factory Methods**: Use `from_file()` and `from_optimization()` for creation or access directly from `Optimization.results` **Dictionary Access**: Use `results[element_label]` for element-specific results **Lazy Loading**: Results objects created on-demand for memory efficiency **Unified Interface**: Consistent API across different result types @@ -150,18 +150,18 @@ class CalculationResults(CompositeContainerMixin['ComponentResults | BusResults model: linopy.Model | None @classmethod - def from_file(cls, folder: str | pathlib.Path, name: str) -> CalculationResults: - """Load CalculationResults from saved files. + def from_file(cls, folder: str | pathlib.Path, name: str) -> Results: + """Load Results from saved files. Args: folder: Directory containing saved files. name: Base name of saved files (without extensions). Returns: - CalculationResults: Loaded instance. + Results: Loaded instance. """ folder = pathlib.Path(folder) - paths = fx_io.CalculationResultsPaths(folder, name) + paths = fx_io.ResultsPaths(folder, name) model = None if paths.linopy_model.exists(): @@ -183,22 +183,22 @@ def from_file(cls, folder: str | pathlib.Path, name: str) -> CalculationResults: ) @classmethod - def from_calculation(cls, calculation: Calculation) -> CalculationResults: - """Create CalculationResults from a Calculation object. + def from_optimization(cls, optimization: Optimization) -> Results: + """Create Results from an Optimization instance. Args: - calculation: Calculation object with solved model. + optimization: The Optimization instance to extract results from. Returns: - CalculationResults: New instance with extracted results. + Results: New instance containing the optimization results. 
""" return cls( - solution=calculation.model.solution, - flow_system_data=calculation.flow_system.to_dataset(), - summary=calculation.summary, - model=calculation.model, - name=calculation.name, - folder=calculation.folder, + solution=optimization.model.solution, + flow_system_data=optimization.flow_system.to_dataset(), + summary=optimization.summary, + model=optimization.model, + name=optimization.name, + folder=optimization.folder, ) def __init__( @@ -211,29 +211,40 @@ def __init__( model: linopy.Model | None = None, **kwargs, # To accept old "flow_system" parameter ): - """Initialize CalculationResults with optimization data. - Usually, this class is instantiated by the Calculation class, or by loading from file. + """Initialize Results with optimization data. + Usually, this class is instantiated by an Optimization object via `Results.from_optimization()` + or by loading from file using `Results.from_file()`. Args: solution: Optimization solution dataset. flow_system_data: Flow system configuration dataset. - name: Calculation name. - summary: Calculation metadata. + name: Optimization name. + summary: Optimization metadata. folder: Results storage folder. model: Linopy optimization model. Deprecated: flow_system: Use flow_system_data instead. + + Note: + The legacy alias `CalculationResults` is deprecated. Use `Results` instead. """ # Handle potential old "flow_system" parameter for backward compatibility if 'flow_system' in kwargs and flow_system_data is None: flow_system_data = kwargs.pop('flow_system') warnings.warn( "The 'flow_system' parameter is deprecated. Use 'flow_system_data' instead. " - "Access is now via '.flow_system_data', while '.flow_system' returns the restored FlowSystem.", + "Access is now via '.flow_system_data', while '.flow_system' returns the restored FlowSystem. 
" + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) + # Validate that flow_system_data is provided + if flow_system_data is None: + raise TypeError( + "flow_system_data is required (or use deprecated 'flow_system' for backward compatibility)." + ) + self.solution = solution self.flow_system_data = flow_system_data self.summary = summary @@ -340,7 +351,7 @@ def effect_share_factors(self): @property def flow_system(self) -> FlowSystem: - """The restored flow_system that was used to create the calculation. + """The restored flow_system that was used to create the optimization. Contains all input parameters.""" if self._flow_system is None: # Temporarily disable all logging to suppress messages during restoration @@ -738,7 +749,7 @@ def _compute_effect_total( Args: element: The element identifier for which to calculate total effects. effect: The effect identifier to calculate. - mode: The calculation mode. Options are: + mode: The optimization mode. Options are: 'temporal': Returns temporal effects. 'periodic': Returns investment-specific effects. 'total': Returns the sum of temporal effects and periodic effects. Defaults to 'total'. @@ -806,7 +817,7 @@ def _create_template_for_mode(self, mode: Literal['temporal', 'periodic', 'total """Create a template DataArray with the correct dimensions for a given mode. Args: - mode: The calculation mode ('temporal', 'periodic', or 'total'). + mode: The optimization mode ('temporal', 'periodic', or 'total'). Returns: A DataArray filled with NaN, with dimensions appropriate for the mode. @@ -831,7 +842,7 @@ def _create_effects_dataset(self, mode: Literal['temporal', 'periodic', 'total'] The dataset does contain the direct as well as the indirect effects of each component. Args: - mode: The calculation mode ('temporal', 'periodic', or 'total'). + mode: The optimization mode ('temporal', 'periodic', or 'total'). 
Returns: An xarray Dataset with components as dimension and effects as variables. @@ -1055,27 +1066,41 @@ def to_file( compression: int = 5, document_model: bool = True, save_linopy_model: bool = False, + overwrite: bool = False, ): """Save results to files. Args: - folder: Save folder (defaults to calculation folder). - name: File name (defaults to calculation name). + folder: Save folder (defaults to optimization folder). + name: File name (defaults to optimization name). compression: Compression level 0-9. document_model: Whether to document model formulations as yaml. save_linopy_model: Whether to save linopy model file. + overwrite: If False, raise error if results files already exist. If True, overwrite existing files. + + Raises: + FileExistsError: If overwrite=False and result files already exist. """ folder = self.folder if folder is None else pathlib.Path(folder) name = self.name if name is None else name - if not folder.exists(): - try: - folder.mkdir(parents=False) - except FileNotFoundError as e: - raise FileNotFoundError( - f'Folder {folder} and its parent do not exist. Please create them first.' - ) from e - paths = fx_io.CalculationResultsPaths(folder, name) + # Ensure folder exists, creating parent directories as needed + folder.mkdir(parents=True, exist_ok=True) + + paths = fx_io.ResultsPaths(folder, name) + + # Check if files already exist (unless overwrite is True) + if not overwrite: + existing_files = [] + for file_path in paths.all_paths().values(): + if file_path.exists(): + existing_files.append(file_path.name) + + if existing_files: + raise FileExistsError( + f'Results files already exist in {folder}: {", ".join(existing_files)}. ' + f'Use overwrite=True to overwrite existing files.' 
+ ) fx_io.save_dataset_to_netcdf(self.solution, paths.solution, compression=compression) fx_io.save_dataset_to_netcdf(self.flow_system_data, paths.flow_system, compression=compression) @@ -1084,29 +1109,60 @@ def to_file( if save_linopy_model: if self.model is None: - logger.critical('No model in the CalculationResults. Saving the model is not possible.') + logger.critical('No model in the Results. Saving the model is not possible.') else: self.model.to_netcdf(paths.linopy_model, engine='netcdf4') if document_model: if self.model is None: - logger.critical('No model in the CalculationResults. Documenting the model is not possible.') + logger.critical('No model in the Results. Documenting the model is not possible.') else: fx_io.document_linopy_model(self.model, path=paths.model_documentation) - logger.success(f'Saved calculation results "{name}" to {paths.model_documentation.parent}') + logger.log(SUCCESS_LEVEL, f'Saved optimization results "{name}" to {paths.model_documentation.parent}') + + +class CalculationResults(Results): + """DEPRECATED: Use Results instead. + + Backwards-compatible alias for Results class. + All functionality is inherited from Results. + """ + + def __init__(self, *args, **kwargs): + # Only warn if directly instantiating CalculationResults (not subclasses) + if self.__class__.__name__ == 'CalculationResults': + warnings.warn( + f'CalculationResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. Use Results instead.', + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) + + @classmethod + def from_calculation(cls, calculation: Optimization) -> CalculationResults: + """Create CalculationResults from a Calculation object. + + DEPRECATED: Use Results.from_optimization() instead. + Backwards-compatible method that redirects to from_optimization(). + + Args: + calculation: Calculation object with solved model. + + Returns: + CalculationResults: New instance with extracted results. 
+ """ + return cls.from_optimization(calculation) class _ElementResults: - def __init__( - self, calculation_results: CalculationResults, label: str, variables: list[str], constraints: list[str] - ): - self._calculation_results = calculation_results + def __init__(self, results: Results, label: str, variables: list[str], constraints: list[str]): + self._results = results self.label = label self._variable_names = variables self._constraint_names = constraints - self.solution = self._calculation_results.solution[self._variable_names] + self.solution = self._results.solution[self._variable_names] @property def variables(self) -> linopy.Variables: @@ -1115,9 +1171,9 @@ def variables(self) -> linopy.Variables: Raises: ValueError: If linopy model is unavailable. """ - if self._calculation_results.model is None: + if self._results.model is None: raise ValueError('The linopy model is not available.') - return self._calculation_results.model.variables[self._variable_names] + return self._results.model.variables[self._variable_names] @property def constraints(self) -> linopy.Constraints: @@ -1126,9 +1182,9 @@ def constraints(self) -> linopy.Constraints: Raises: ValueError: If linopy model is unavailable. 
""" - if self._calculation_results.model is None: + if self._results.model is None: raise ValueError('The linopy model is not available.') - return self._calculation_results.model.constraints[self._constraint_names] + return self._results.model.constraints[self._constraint_names] def __repr__(self) -> str: """Return string representation with element info and dataset preview.""" @@ -1183,7 +1239,7 @@ def filter_solution( class _NodeResults(_ElementResults): def __init__( self, - calculation_results: CalculationResults, + results: Results, label: str, variables: list[str], constraints: list[str], @@ -1191,7 +1247,7 @@ def __init__( outputs: list[str], flows: list[str], ): - super().__init__(calculation_results, label, variables, constraints) + super().__init__(results, label, variables, constraints) self.inputs = inputs self.outputs = outputs self.flows = flows @@ -1319,10 +1375,9 @@ def plot_node_balance( "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." ) - import warnings - warnings.warn( - "The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead.", + f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. 
" + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1358,7 +1413,7 @@ def plot_node_balance( ds, facet_by=facet_by, animate_by=animate_by, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, mode=mode, title=title, facet_cols=facet_cols, @@ -1369,7 +1424,7 @@ def plot_node_balance( else: figure_like = plotting.with_matplotlib( ds, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, mode=mode, title=title, **plot_kwargs, @@ -1378,7 +1433,7 @@ def plot_node_balance( return plotting.export_figure( figure_like=figure_like, - default_path=self._calculation_results.folder / title, + default_path=self._results.folder / title, default_filetype=default_filetype, user_path=None if isinstance(save, bool) else pathlib.Path(save), show=show, @@ -1454,10 +1509,9 @@ def plot_node_balance_pie( "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." ) - import warnings - warnings.warn( - "The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead.", + f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. 
" + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1467,14 +1521,14 @@ def plot_node_balance_pie( dpi = plot_kwargs.pop('dpi', None) # None uses CONFIG.Plotting.default_dpi inputs = sanitize_dataset( - ds=self.solution[self.inputs] * self._calculation_results.hours_per_timestep, + ds=self.solution[self.inputs] * self._results.hours_per_timestep, threshold=1e-5, drop_small_vars=True, zero_small_values=True, drop_suffix='|', ) outputs = sanitize_dataset( - ds=self.solution[self.outputs] * self._calculation_results.hours_per_timestep, + ds=self.solution[self.outputs] * self._results.hours_per_timestep, threshold=1e-5, drop_small_vars=True, zero_small_values=True, @@ -1526,7 +1580,7 @@ def plot_node_balance_pie( figure_like = plotting.dual_pie_with_plotly( data_left=inputs, data_right=outputs, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, title=title, text_info=text_info, subtitles=('Inputs', 'Outputs'), @@ -1540,7 +1594,7 @@ def plot_node_balance_pie( figure_like = plotting.dual_pie_with_matplotlib( data_left=inputs.to_pandas(), data_right=outputs.to_pandas(), - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, title=title, subtitles=('Inputs', 'Outputs'), legend_title='Flows', @@ -1553,7 +1607,7 @@ def plot_node_balance_pie( return plotting.export_figure( figure_like=figure_like, - default_path=self._calculation_results.folder / title, + default_path=self._results.folder / title, default_filetype=default_filetype, user_path=None if isinstance(save, bool) else pathlib.Path(save), show=show, @@ -1594,10 +1648,9 @@ def node_balance( "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." 
) - import warnings - warnings.warn( - "The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead.", + f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. " + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1608,7 +1661,7 @@ def node_balance( ds = sanitize_dataset( ds=ds, threshold=threshold, - timesteps=self._calculation_results.timesteps_extra if with_last_timestep else None, + timesteps=self._results.timesteps_extra if with_last_timestep else None, negate=( self.outputs + self.inputs if negate_outputs and negate_inputs @@ -1624,7 +1677,7 @@ def node_balance( ds, _ = _apply_selection_to_data(ds, select=select, drop=True) if unit_type == 'flow_hours': - ds = ds * self._calculation_results.hours_per_timestep + ds = ds * self._results.hours_per_timestep ds = ds.rename_vars({var: var.replace('flow_rate', 'flow_hours') for var in ds.data_vars}) return ds @@ -1741,10 +1794,9 @@ def plot_charge_state( "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." ) - import warnings - warnings.warn( - "The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead.", + f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. 
" + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -1776,7 +1828,7 @@ def plot_charge_state( ds, facet_by=facet_by, animate_by=animate_by, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, mode=mode, title=title, facet_cols=facet_cols, @@ -1792,7 +1844,7 @@ def plot_charge_state( charge_state_ds, facet_by=facet_by, animate_by=animate_by, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, mode='line', # Always line for charge_state title='', # No title needed for this temp figure facet_cols=facet_cols, @@ -1832,7 +1884,7 @@ def plot_charge_state( # For matplotlib, plot flows (node balance), then add charge_state as line fig, ax = plotting.with_matplotlib( ds, - colors=colors if colors is not None else self._calculation_results.colors, + colors=colors if colors is not None else self._results.colors, mode=mode, title=title, **plot_kwargs, @@ -1864,7 +1916,7 @@ def plot_charge_state( return plotting.export_figure( figure_like=figure_like, - default_path=self._calculation_results.folder / title, + default_path=self._results.folder / title, default_filetype=default_filetype, user_path=None if isinstance(save, bool) else pathlib.Path(save), show=show, @@ -1894,7 +1946,7 @@ def node_balance_with_charge_state( return sanitize_dataset( ds=self.solution[variable_names], threshold=threshold, - timesteps=self._calculation_results.timesteps_extra, + timesteps=self._results.timesteps_extra, negate=( self.outputs + self.inputs if negate_outputs and negate_inputs @@ -1925,7 +1977,7 @@ def get_shares_from(self, element: str) -> xr.Dataset: class FlowResults(_ElementResults): def __init__( self, - calculation_results: CalculationResults, + results: Results, label: str, variables: list[str], constraints: list[str], @@ -1933,7 +1985,7 @@ def __init__( end: 
str,
         component: str,
     ):
-        super().__init__(calculation_results, label, variables, constraints)
+        super().__init__(results, label, variables, constraints)
         self.start = start
         self.end = end
         self.component = component
@@ -1944,7 +1996,7 @@ def flow_rate(self) -> xr.DataArray:
 
     @property
     def flow_hours(self) -> xr.DataArray:
-        return (self.flow_rate * self._calculation_results.hours_per_timestep).rename(f'{self.label}|flow_hours')
+        return (self.flow_rate * self._results.hours_per_timestep).rename(f'{self.label}|flow_hours')
 
     @property
     def size(self) -> xr.DataArray:
@@ -1952,16 +2004,16 @@ def size(self) -> xr.DataArray:
         if name in self.solution:
             return self.solution[name]
         try:
-            return self._calculation_results.flow_system.flows[self.label].size.rename(name)
+            return self._results.flow_system.flows[self.label].size.rename(name)
         except _FlowSystemRestorationError:
             logger.critical(f'Size of flow {self.label}.size not availlable. Returning NaN')
             return xr.DataArray(np.nan).rename(name)
 
 
-class SegmentedCalculationResults:
-    """Results container for segmented optimization calculations with temporal decomposition.
+class SegmentedResults:
+    """Results container for segmented optimizations with temporal decomposition.
 
-    This class manages results from SegmentedCalculation runs where large optimization
+    This class manages results from SegmentedOptimization runs where large optimization
     problems are solved by dividing the time horizon into smaller, overlapping segments.
     It provides unified access to results across all segments while maintaining the
     ability to analyze individual segment behavior.
@@ -1984,8 +2036,8 @@ class SegmentedCalculationResults: Load and analyze segmented results: ```python - # Load segmented calculation results - results = SegmentedCalculationResults.from_file('results', 'annual_segmented') + # Load segmented optimization results + results = SegmentedResults.from_file('results', 'annual_segmented') # Access unified results across all segments full_timeline = results.all_timesteps @@ -2001,20 +2053,20 @@ class SegmentedCalculationResults: max_discontinuity = segment_boundaries['max_storage_jump'] ``` - Create from segmented calculation: + Create from segmented optimization: ```python - # After running segmented calculation - segmented_calc = SegmentedCalculation( + # After running segmented optimization + segmented_opt = SegmentedOptimization( name='annual_system', flow_system=system, timesteps_per_segment=730, # Monthly segments overlap_timesteps=48, # 2-day overlap ) - segmented_calc.do_modeling_and_solve(solver='gurobi') + segmented_opt.do_modeling_and_solve(solver='gurobi') # Extract unified results - results = SegmentedCalculationResults.from_calculation(segmented_calc) + results = SegmentedResults.from_optimization(segmented_opt) # Save combined results results.to_file(compression=5) @@ -2055,33 +2107,50 @@ class SegmentedCalculationResults: """ @classmethod - def from_calculation(cls, calculation: SegmentedCalculation): + def from_optimization(cls, optimization: SegmentedOptimization) -> SegmentedResults: + """Create SegmentedResults from a SegmentedOptimization instance. + + Args: + optimization: The SegmentedOptimization instance to extract results from. + + Returns: + SegmentedResults: New instance containing the optimization results. 
+        """
         return cls(
-            [calc.results for calc in calculation.sub_calculations],
-            all_timesteps=calculation.all_timesteps,
-            timesteps_per_segment=calculation.timesteps_per_segment,
-            overlap_timesteps=calculation.overlap_timesteps,
-            name=calculation.name,
-            folder=calculation.folder,
+            [calc.results for calc in optimization.sub_optimizations],
+            all_timesteps=optimization.all_timesteps,
+            timesteps_per_segment=optimization.timesteps_per_segment,
+            overlap_timesteps=optimization.overlap_timesteps,
+            name=optimization.name,
+            folder=optimization.folder,
         )
 
     @classmethod
-    def from_file(cls, folder: str | pathlib.Path, name: str) -> SegmentedCalculationResults:
-        """Load SegmentedCalculationResults from saved files.
+    def from_file(cls, folder: str | pathlib.Path, name: str) -> SegmentedResults:
+        """Load SegmentedResults from saved files.
 
         Args:
             folder: Directory containing saved files.
             name: Base name of saved files.
 
         Returns:
-            SegmentedCalculationResults: Loaded instance.
+            SegmentedResults: Loaded instance.
         """
         folder = pathlib.Path(folder)
         path = folder / name
-        logger.info(f'loading calculation "{name}" from file ("{path.with_suffix(".nc4")}")')
-        meta_data = fx_io.load_json(path.with_suffix('.json'))
+        meta_data_path = path.with_suffix('.json')
+        logger.info(f'loading segmented optimization metadata from file ("{meta_data_path}")')
+        meta_data = fx_io.load_json(meta_data_path)
+
+        # Handle both new 'sub_optimizations' and legacy 'sub_calculations' keys
+        sub_names = meta_data.get('sub_optimizations') or meta_data.get('sub_calculations')
+        if sub_names is None:
+            raise KeyError(
+                "Missing 'sub_optimizations' (or legacy 'sub_calculations') key in segmented results metadata."
+ ) + return cls( - [CalculationResults.from_file(folder, sub_name) for sub_name in meta_data['sub_calculations']], + [Results.from_file(folder, sub_name) for sub_name in sub_names], all_timesteps=pd.DatetimeIndex( [datetime.datetime.fromisoformat(date) for date in meta_data['all_timesteps']], name='time' ), @@ -2093,7 +2162,7 @@ def from_file(cls, folder: str | pathlib.Path, name: str) -> SegmentedCalculatio def __init__( self, - segment_results: list[CalculationResults], + segment_results: list[Results], all_timesteps: pd.DatetimeIndex, timesteps_per_segment: int, overlap_timesteps: int, @@ -2106,7 +2175,6 @@ def __init__( self.overlap_timesteps = overlap_timesteps self.name = name self.folder = pathlib.Path(folder) if folder is not None else pathlib.Path.cwd() / 'results' - self.hours_per_timestep = FlowSystem.calculate_hours_per_timestep(self.all_timesteps) self._colors = {} @property @@ -2115,7 +2183,7 @@ def meta_data(self) -> dict[str, int | list[str]]: 'all_timesteps': [datetime.datetime.isoformat(date) for date in self.all_timesteps], 'timesteps_per_segment': self.timesteps_per_segment, 'overlap_timesteps': self.overlap_timesteps, - 'sub_calculations': [calc.name for calc in self.segment_results], + 'sub_optimizations': [calc.name for calc in self.segment_results], } @property @@ -2142,8 +2210,8 @@ def setup_colors( Setup colors for all variables across all segment results. This method applies the same color configuration to all segments, ensuring - consistent visualization across the entire segmented calculation. The color - mapping is propagated to each segment's CalculationResults instance. + consistent visualization across the entire segmented optimization. The color + mapping is propagated to each segment's Results instance. Args: config: Configuration for color assignment. 
Can be: @@ -2176,6 +2244,9 @@ def setup_colors( Complete variable-to-color mapping dictionary from the first segment (all segments will have the same mapping) """ + if not self.segment_results: + raise ValueError('No segment_results available; cannot setup colors on an empty SegmentedResults.') + self.colors = self.segment_results[0].setup_colors(config=config, default_colorscale=default_colorscale) return self.colors @@ -2258,11 +2329,10 @@ def plot_heatmap( "and new parameter 'reshape_time'. Use only 'reshape_time'." ) - import warnings - warnings.warn( "The 'heatmap_timeframes' and 'heatmap_timesteps_per_frame' parameters are deprecated. " - "Use 'reshape_time=(timeframes, timesteps_per_frame)' instead.", + f"Use 'reshape_time=(timeframes, timesteps_per_frame)' instead. " + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -2277,10 +2347,9 @@ def plot_heatmap( "Cannot use both deprecated parameter 'color_map' and new parameter 'colors'. Use only 'colors'." ) - import warnings - warnings.warn( - "The 'color_map' parameter is deprecated. Use 'colors' instead.", + f"The 'color_map' parameter is deprecated. Use 'colors' instead. " + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -2302,29 +2371,79 @@ def plot_heatmap( **plot_kwargs, ) - def to_file(self, folder: str | pathlib.Path | None = None, name: str | None = None, compression: int = 5): + def to_file( + self, + folder: str | pathlib.Path | None = None, + name: str | None = None, + compression: int = 5, + overwrite: bool = False, + ): """Save segmented results to files. Args: folder: Save folder (defaults to instance folder). name: File name (defaults to instance name). compression: Compression level 0-9. + overwrite: If False, raise error if results files already exist. If True, overwrite existing files. + + Raises: + FileExistsError: If overwrite=False and result files already exist. 
""" folder = self.folder if folder is None else pathlib.Path(folder) name = self.name if name is None else name path = folder / name - if not folder.exists(): - try: - folder.mkdir(parents=False) - except FileNotFoundError as e: - raise FileNotFoundError( - f'Folder {folder} and its parent do not exist. Please create them first.' - ) from e + + # Ensure folder exists, creating parent directories as needed + folder.mkdir(parents=True, exist_ok=True) + + # Check if metadata file already exists (unless overwrite is True) + metadata_file = path.with_suffix('.json') + if not overwrite and metadata_file.exists(): + raise FileExistsError( + f'Segmented results file already exists: {metadata_file}. ' + f'Use overwrite=True to overwrite existing files.' + ) + + # Save segments (they will check for overwrite themselves) for segment in self.segment_results: - segment.to_file(folder=folder, name=segment.name, compression=compression) + segment.to_file(folder=folder, name=segment.name, compression=compression, overwrite=overwrite) - fx_io.save_json(self.meta_data, path.with_suffix('.json')) - logger.info(f'Saved calculation "{name}" to {path}') + fx_io.save_json(self.meta_data, metadata_file) + logger.info(f'Saved optimization "{name}" to {path}') + + +class SegmentedCalculationResults(SegmentedResults): + """DEPRECATED: Use SegmentedResults instead. + + Backwards-compatible alias for SegmentedResults class. + All functionality is inherited from SegmentedResults. + """ + + def __init__(self, *args, **kwargs): + # Only warn if directly instantiating SegmentedCalculationResults (not subclasses) + if self.__class__.__name__ == 'SegmentedCalculationResults': + warnings.warn( + f'SegmentedCalculationResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
' + 'Use SegmentedResults instead.', + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) + + @classmethod + def from_calculation(cls, calculation: SegmentedOptimization) -> SegmentedCalculationResults: + """Create SegmentedCalculationResults from a SegmentedCalculation object. + + DEPRECATED: Use SegmentedResults.from_optimization() instead. + Backwards-compatible method that redirects to from_optimization(). + + Args: + calculation: SegmentedCalculation object with solved model. + + Returns: + SegmentedCalculationResults: New instance with extracted results. + """ + return cls.from_optimization(calculation) def plot_heatmap( @@ -2353,7 +2472,7 @@ def plot_heatmap( """Plot heatmap visualization with support for multi-variable, faceting, and animation. This function provides a standalone interface to the heatmap plotting capabilities, - supporting the same modern features as CalculationResults.plot_heatmap(). + supporting the same modern features as Results.plot_heatmap(). Args: data: Data to plot. Can be a single DataArray or an xarray Dataset. @@ -2405,11 +2524,10 @@ def plot_heatmap( "and new parameter 'reshape_time'. Use only 'reshape_time'." ) - import warnings - warnings.warn( "The 'heatmap_timeframes' and 'heatmap_timesteps_per_frame' parameters are deprecated. " - "Use 'reshape_time=(timeframes, timesteps_per_frame)' instead.", + "Use 'reshape_time=(timeframes, timesteps_per_frame)' instead. " + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -2424,10 +2542,9 @@ def plot_heatmap( "Cannot use both deprecated parameter 'color_map' and new parameter 'colors'. Use only 'colors'." ) - import warnings - warnings.warn( - "The 'color_map' parameter is deprecated. Use 'colors' instead.", + f"The 'color_map' parameter is deprecated. Use 'colors' instead." 
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) @@ -2441,10 +2558,9 @@ def plot_heatmap( "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." ) - import warnings - warnings.warn( - "The 'indexer' parameter is deprecated. Use 'select' instead.", + f"The 'indexer' parameter is deprecated. Use 'select' instead. " + f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', DeprecationWarning, stacklevel=2, ) diff --git a/mkdocs.yml b/mkdocs.yml index 61d33e233..0adba464d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,4 +1,4 @@ -# flixOpt Documentation Configuration +# FlixOpt Documentation Configuration # https://mkdocstrings.github.io/python/usage/configuration/docstrings/ # https://squidfunk.github.io/mkdocs-material/setup/ @@ -278,10 +278,10 @@ extra: social: - icon: fontawesome/brands/github link: https://github.com/flixOpt/flixopt - name: flixOpt on GitHub + name: FlixOpt on GitHub - icon: fontawesome/brands/python link: https://pypi.org/project/flixopt/ - name: flixOpt on PyPI + name: FlixOpt on PyPI analytics: provider: google diff --git a/pyproject.toml b/pyproject.toml index d7510b1ce..258b0ab7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta" [project] name = "flixopt" dynamic = ["version"] -description = "Vector based energy and material flow optimization framework in Python." +description = "Progressive flow system optimization in Python - start simple, scale to complex." 
readme = "README.md" requires-python = ">=3.10" license = "MIT" @@ -88,7 +88,7 @@ dev = [ "tsam==2.3.9", "scipy==1.15.1", "gurobipy==12.0.3", - "dash==3.2.0", + "dash==3.3.0", "dash-cytoscape==1.0.2", "dash-daq==0.6.0", "networkx==3.0.0", @@ -162,7 +162,6 @@ select = [ "TCH", # flake8-type-checking (optimize imports for type checking) ] ignore = [ # Ignore specific rules - "F401", # Allow unused imports in some cases (use __all__) "UP038", "E501" # ignore long lines ] diff --git a/scripts/extract_changelog.py b/scripts/extract_changelog.py index d05229896..44790fec6 100644 --- a/scripts/extract_changelog.py +++ b/scripts/extract_changelog.py @@ -4,7 +4,6 @@ Simple script to create one file per release. """ -import os import re from pathlib import Path diff --git a/tests/conftest.py b/tests/conftest.py index 93d3c9f0e..b7acee446 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -558,11 +558,11 @@ def flow_system_long(): thermal_load_ts, electrical_load_ts = ( fx.TimeSeriesData(thermal_load), - fx.TimeSeriesData(electrical_load, aggregation_weight=0.7), + fx.TimeSeriesData(electrical_load, clustering_weight=0.7), ) p_feed_in, p_sell = ( - fx.TimeSeriesData(-(p_el - 0.5), aggregation_group='p_el'), - fx.TimeSeriesData(p_el + 0.5, aggregation_group='p_el'), + fx.TimeSeriesData(-(p_el - 0.5), clustering_group='p_el'), + fx.TimeSeriesData(p_el + 0.5, clustering_group='p_el'), ) flow_system = fx.FlowSystem(pd.DatetimeIndex(data.index)) @@ -703,19 +703,17 @@ def assert_almost_equal_numeric( np.testing.assert_allclose(actual, desired, rtol=relative_tol, atol=absolute_tolerance, err_msg=err_msg) -def create_calculation_and_solve( +def create_optimization_and_solve( flow_system: fx.FlowSystem, solver, name: str, allow_infeasible: bool = False -) -> fx.FullCalculation: - calculation = fx.FullCalculation(name, flow_system) - calculation.do_modeling() +) -> fx.Optimization: + optimization = fx.Optimization(name, flow_system) + optimization.do_modeling() try: - 
calculation.solve(solver) - except RuntimeError as e: - if allow_infeasible: - pass - else: - raise RuntimeError from e - return calculation + optimization.solve(solver) + except RuntimeError: + if not allow_infeasible: + raise + return optimization def create_linopy_model(flow_system: fx.FlowSystem) -> FlowSystemModel: @@ -726,11 +724,11 @@ def create_linopy_model(flow_system: fx.FlowSystem) -> FlowSystemModel: flow_system: The FlowSystem to build the model from. Returns: - FlowSystemModel: The built model from FullCalculation.do_modeling(). + FlowSystemModel: The built model from Optimization.do_modeling(). """ - calculation = fx.FullCalculation('GenericName', flow_system) - calculation.do_modeling() - return calculation.model + optimization = fx.Optimization('GenericName', flow_system) + optimization.do_modeling() + return optimization.model def assert_conequal(actual: linopy.Constraint, desired: linopy.Constraint): diff --git a/tests/test_bus.py b/tests/test_bus.py index 0a5b19d8d..f1497a0ec 100644 --- a/tests/test_bus.py +++ b/tests/test_bus.py @@ -60,11 +60,23 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config): == 0, ) + # Penalty is now added as shares to the Penalty effect's temporal model + # Check that the penalty shares exist + assert 'TestBus->Penalty(temporal)' in model.constraints + assert 'TestBus->Penalty(temporal)' in model.variables + + # The penalty share should equal the excess times the penalty cost + # Note: Each excess (input and output) creates its own share constraint, so we have two + # Let's verify the total penalty contribution by checking the effect's temporal model + penalty_effect = flow_system.effects.penalty_effect + assert penalty_effect.submodel is not None + assert 'TestBus' in penalty_effect.submodel.temporal.shares + assert_conequal( - model.constraints['TestBus->Penalty'], - model.variables['TestBus->Penalty'] - == (model.variables['TestBus|excess_input'] * 1e5 * model.hours_per_step).sum() - + 
(model.variables['TestBus|excess_output'] * 1e5 * model.hours_per_step).sum(), + model.constraints['TestBus->Penalty(temporal)'], + model.variables['TestBus->Penalty(temporal)'] + == model.variables['TestBus|excess_input'] * 1e5 * model.hours_per_step + + model.variables['TestBus|excess_output'] * 1e5 * model.hours_per_step, ) def test_bus_with_coords(self, basic_flow_system_linopy_coords, coords_config): diff --git a/tests/test_component.py b/tests/test_component.py index dbbd85c8f..c33aaf437 100644 --- a/tests/test_component.py +++ b/tests/test_component.py @@ -9,8 +9,8 @@ assert_conequal, assert_sets_equal, assert_var_equal, - create_calculation_and_solve, create_linopy_model, + create_optimization_and_solve, ) @@ -434,7 +434,7 @@ def test_transmission_basic(self, basic_flow_system, highs_solver): flow_system.add_elements(transmission, boiler) - _ = create_calculation_and_solve(flow_system, highs_solver, 'test_transmission_basic') + _ = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_basic') # Assertions assert_almost_equal_numeric( @@ -498,7 +498,7 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): flow_system.add_elements(transmission, boiler, boiler2, last2) - calculation = create_calculation_and_solve(flow_system, highs_solver, 'test_transmission_advanced') + optimization = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_advanced') # Assertions assert_almost_equal_numeric( @@ -508,7 +508,7 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): ) assert_almost_equal_numeric( - calculation.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values, + optimization.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values, transmission.out1.submodel.flow_rate.solution.values, 'Flow rate of Rohr__Rohr1b is not correct', ) @@ -579,7 +579,7 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): flow_system.add_elements(transmission, 
boiler, boiler2, last2) - calculation = create_calculation_and_solve(flow_system, highs_solver, 'test_transmission_advanced') + optimization = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_advanced') # Assertions assert_almost_equal_numeric( @@ -589,7 +589,7 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): ) assert_almost_equal_numeric( - calculation.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values, + optimization.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values, transmission.out1.submodel.flow_rate.solution.values, 'Flow rate of Rohr__Rohr1b is not correct', ) diff --git a/tests/test_config.py b/tests/test_config.py index b09e0c5d9..9c4f423ee 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -5,7 +5,7 @@ import pytest -from flixopt.config import CONFIG, MultilineFormatter +from flixopt.config import CONFIG, SUCCESS_LEVEL, MultilineFormatter logger = logging.getLogger('flixopt') @@ -75,9 +75,69 @@ def test_disable_logging(self, capfd): def test_custom_success_level(self, capfd): """Test custom SUCCESS log level.""" CONFIG.Logging.enable_console('INFO') - logger.success('success message') + logger.log(SUCCESS_LEVEL, 'success message') assert 'success message' in capfd.readouterr().out + def test_success_level_as_minimum(self, capfd): + """Test setting SUCCESS as minimum log level.""" + CONFIG.Logging.enable_console('SUCCESS') + + # INFO should not appear (level 20 < 25) + logger.info('info message') + assert 'info message' not in capfd.readouterr().out + + # SUCCESS should appear (level 25) + logger.log(SUCCESS_LEVEL, 'success message') + assert 'success message' in capfd.readouterr().out + + # WARNING should appear (level 30 > 25) + logger.warning('warning message') + assert 'warning message' in capfd.readouterr().out + + def test_success_level_numeric(self, capfd): + """Test setting SUCCESS level using numeric value.""" + CONFIG.Logging.enable_console(25) + 
logger.log(25, 'success with numeric level') + assert 'success with numeric level' in capfd.readouterr().out + + def test_success_level_constant(self, capfd): + """Test using SUCCESS_LEVEL constant.""" + CONFIG.Logging.enable_console(SUCCESS_LEVEL) + logger.log(SUCCESS_LEVEL, 'success with constant') + assert 'success with constant' in capfd.readouterr().out + assert SUCCESS_LEVEL == 25 + + def test_success_file_logging(self, tmp_path): + """Test SUCCESS level with file logging.""" + log_file = tmp_path / 'test_success.log' + CONFIG.Logging.enable_file('SUCCESS', str(log_file)) + + # INFO should not be logged + logger.info('info not logged') + + # SUCCESS should be logged + logger.log(SUCCESS_LEVEL, 'success logged to file') + + content = log_file.read_text() + assert 'info not logged' not in content + assert 'success logged to file' in content + + def test_success_color_customization(self, capfd): + """Test customizing SUCCESS level color.""" + CONFIG.Logging.enable_console('SUCCESS') + + # Customize SUCCESS color + CONFIG.Logging.set_colors( + { + 'SUCCESS': 'bold_green,bg_black', + 'WARNING': 'yellow', + } + ) + + logger.log(SUCCESS_LEVEL, 'colored success') + output = capfd.readouterr().out + assert 'colored success' in output + def test_multiline_formatting(self): """Test that multi-line messages get box borders.""" formatter = MultilineFormatter() diff --git a/tests/test_dataconverter.py b/tests/test_dataconverter.py index 0f12a1af3..a5774fd6b 100644 --- a/tests/test_dataconverter.py +++ b/tests/test_dataconverter.py @@ -496,7 +496,7 @@ class TestTimeSeriesDataConversion: def test_timeseries_data_basic(self, time_coords): """TimeSeriesData should work like DataArray.""" data_array = xr.DataArray([10, 20, 30, 40, 50], coords={'time': time_coords}, dims='time') - ts_data = TimeSeriesData(data_array, aggregation_group='test') + ts_data = TimeSeriesData(data_array, clustering_group='test') result = DataConverter.to_dataarray(ts_data, coords={'time': time_coords}) 
diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py new file mode 100644 index 000000000..c77d794a5 --- /dev/null +++ b/tests/test_deprecations.py @@ -0,0 +1,613 @@ +"""Comprehensive pytest-based test for all deprecation warnings with v5.0.0 removal message.""" + +import warnings + +import numpy as np +import pandas as pd +import pytest +import xarray as xr + +import flixopt as fx +from flixopt.config import DEPRECATION_REMOVAL_VERSION, change_logging_level +from flixopt.linear_converters import CHP, Boiler, HeatPump, HeatPumpWithSource, Power2Heat +from flixopt.results import plot_heatmap + + +# === Parameter deprecations (via _handle_deprecated_kwarg) === +@pytest.mark.parametrize( + 'name,factory', + [ + ("Source 'source'", lambda: fx.Source('s1', source=fx.Flow('out1', bus='bus', size=10))), + ("Sink 'sink'", lambda: fx.Sink('sink1', sink=fx.Flow('in2', bus='bus', size=10))), + ("InvestParameters 'fix_effects'", lambda: fx.InvestParameters(minimum_size=10, fix_effects={'costs': 100})), + ( + "InvestParameters 'specific_effects'", + lambda: fx.InvestParameters(minimum_size=10, specific_effects={'costs': 10}), + ), + ( + "InvestParameters 'divest_effects'", + lambda: fx.InvestParameters(minimum_size=10, divest_effects={'costs': 50}), + ), + ( + "InvestParameters 'piecewise_effects'", + lambda: fx.InvestParameters(minimum_size=10, piecewise_effects=[]), + ), + ("InvestParameters 'optional'", lambda: fx.InvestParameters(minimum_size=10, optional=True)), + ("OnOffParameters 'on_hours_total_min'", lambda: fx.OnOffParameters(on_hours_total_min=10)), + ("OnOffParameters 'on_hours_total_max'", lambda: fx.OnOffParameters(on_hours_total_max=20)), + ("OnOffParameters 'switch_on_total_max'", lambda: fx.OnOffParameters(switch_on_total_max=5)), + ("Flow 'flow_hours_total_min'", lambda: fx.Flow('f1', bus='bus', size=10, flow_hours_total_min=5)), + ("Flow 'flow_hours_total_max'", lambda: fx.Flow('f2', bus='bus', size=10, flow_hours_total_max=20)), + ( + "Flow 
'flow_hours_per_period_min'", + lambda: fx.Flow('f3', bus='bus', size=10, flow_hours_per_period_min=5), + ), + ( + "Flow 'flow_hours_per_period_max'", + lambda: fx.Flow('f4', bus='bus', size=10, flow_hours_per_period_max=20), + ), + ("Flow 'total_flow_hours_min'", lambda: fx.Flow('f5', bus='bus', size=10, total_flow_hours_min=5)), + ("Flow 'total_flow_hours_max'", lambda: fx.Flow('f6', bus='bus', size=10, total_flow_hours_max=20)), + ( + "Effect 'minimum_operation'", + lambda: fx.Effect('e1', unit='€', description='test', minimum_operation=100), + ), + ( + "Effect 'maximum_operation'", + lambda: fx.Effect('e2', unit='€', description='test', maximum_operation=200), + ), + ("Effect 'minimum_invest'", lambda: fx.Effect('e3', unit='€', description='test', minimum_invest=50)), + ("Effect 'maximum_invest'", lambda: fx.Effect('e4', unit='€', description='test', maximum_invest=150)), + ( + "Effect 'minimum_operation_per_hour'", + lambda: fx.Effect('e5', unit='€', description='test', minimum_operation_per_hour=10), + ), + ( + "Effect 'maximum_operation_per_hour'", + lambda: fx.Effect('e6', unit='€', description='test', maximum_operation_per_hour=30), + ), + # Linear converters + ( + "Boiler 'Q_fu'", + lambda: Boiler( + 'b1', Q_fu=fx.Flow('f1', 'bus', 10), thermal_flow=fx.Flow('h1', 'bus', 9), thermal_efficiency=0.9 + ), + ), + ( + "Boiler 'Q_th'", + lambda: Boiler( + 'b2', fuel_flow=fx.Flow('f2', 'bus', 10), Q_th=fx.Flow('h2', 'bus', 9), thermal_efficiency=0.9 + ), + ), + ( + "Boiler 'eta'", + lambda: Boiler('b3', fuel_flow=fx.Flow('f3', 'bus', 10), thermal_flow=fx.Flow('h3', 'bus', 9), eta=0.9), + ), + ( + "Power2Heat 'P_el'", + lambda: Power2Heat( + 'p1', P_el=fx.Flow('e1', 'bus', 10), thermal_flow=fx.Flow('h4', 'bus', 9), thermal_efficiency=0.9 + ), + ), + ( + "Power2Heat 'Q_th'", + lambda: Power2Heat( + 'p2', electrical_flow=fx.Flow('e2', 'bus', 10), Q_th=fx.Flow('h5', 'bus', 9), thermal_efficiency=0.9 + ), + ), + ( + "Power2Heat 'eta'", + lambda: Power2Heat( + 'p3', 
electrical_flow=fx.Flow('e3', 'bus', 10), thermal_flow=fx.Flow('h6', 'bus', 9), eta=0.9 + ), + ), + ( + "HeatPump 'P_el'", + lambda: HeatPump('hp1', P_el=fx.Flow('e4', 'bus', 10), thermal_flow=fx.Flow('h7', 'bus', 30), cop=3.0), + ), + ( + "HeatPump 'Q_th'", + lambda: HeatPump('hp2', electrical_flow=fx.Flow('e5', 'bus', 10), Q_th=fx.Flow('h8', 'bus', 30), cop=3.0), + ), + ( + "HeatPump 'COP'", + lambda: HeatPump( + 'hp3', electrical_flow=fx.Flow('e6', 'bus', 10), thermal_flow=fx.Flow('h9', 'bus', 30), COP=3.0 + ), + ), + ( + "CHP 'Q_fu'", + lambda: CHP( + 'chp1', + Q_fu=fx.Flow('f4', 'bus', 100), + electrical_flow=fx.Flow('e7', 'bus', 30), + thermal_flow=fx.Flow('h10', 'bus', 60), + thermal_efficiency=0.6, + electrical_efficiency=0.3, + ), + ), + ( + "CHP 'P_el'", + lambda: CHP( + 'chp2', + fuel_flow=fx.Flow('f5', 'bus', 100), + P_el=fx.Flow('e8', 'bus', 30), + thermal_flow=fx.Flow('h11', 'bus', 60), + thermal_efficiency=0.6, + electrical_efficiency=0.3, + ), + ), + ( + "CHP 'Q_th'", + lambda: CHP( + 'chp3', + fuel_flow=fx.Flow('f6', 'bus', 100), + electrical_flow=fx.Flow('e9', 'bus', 30), + Q_th=fx.Flow('h12', 'bus', 60), + thermal_efficiency=0.6, + electrical_efficiency=0.3, + ), + ), + ( + "CHP 'eta_th'", + lambda: CHP( + 'chp4', + fuel_flow=fx.Flow('f7', 'bus', 100), + electrical_flow=fx.Flow('e10', 'bus', 30), + thermal_flow=fx.Flow('h13', 'bus', 60), + eta_th=0.6, + electrical_efficiency=0.3, + ), + ), + ( + "CHP 'eta_el'", + lambda: CHP( + 'chp5', + fuel_flow=fx.Flow('f8', 'bus', 100), + electrical_flow=fx.Flow('e11', 'bus', 30), + thermal_flow=fx.Flow('h14', 'bus', 60), + thermal_efficiency=0.6, + eta_el=0.3, + ), + ), + ( + "HeatPumpWithSource 'COP'", + lambda: HeatPumpWithSource( + 'hps1', + electrical_flow=fx.Flow('e12', 'bus', 10), + heat_source_flow=fx.Flow('hs1', 'bus', 20), + thermal_flow=fx.Flow('h15', 'bus', 30), + COP=3.0, + ), + ), + ( + "HeatPumpWithSource 'P_el'", + lambda: HeatPumpWithSource( + 'hps2', + P_el=fx.Flow('e13', 'bus', 10), + 
heat_source_flow=fx.Flow('hs2', 'bus', 20), + thermal_flow=fx.Flow('h16', 'bus', 30), + cop=3.0, + ), + ), + ( + "HeatPumpWithSource 'Q_ab'", + lambda: HeatPumpWithSource( + 'hps3', + electrical_flow=fx.Flow('e14', 'bus', 10), + Q_ab=fx.Flow('hs3', 'bus', 20), + thermal_flow=fx.Flow('h17', 'bus', 30), + cop=3.0, + ), + ), + ( + "HeatPumpWithSource 'Q_th'", + lambda: HeatPumpWithSource( + 'hps4', + electrical_flow=fx.Flow('e15', 'bus', 10), + heat_source_flow=fx.Flow('hs4', 'bus', 20), + Q_th=fx.Flow('h18', 'bus', 30), + cop=3.0, + ), + ), + # TimeSeriesData parameters + ("TimeSeriesData 'agg_group'", lambda: fx.TimeSeriesData([1, 2, 3], agg_group=1)), + ("TimeSeriesData 'agg_weight'", lambda: fx.TimeSeriesData([1, 2, 3], agg_weight=2.5)), + # Storage parameter + ( + "Storage 'initial_charge_state=lastValueOfSim'", + lambda: fx.Storage( + 'stor1', + charging=fx.Flow('charge', 'bus', 10), + discharging=fx.Flow('discharge', 'bus', 10), + capacity_in_flow_hours=10, + initial_charge_state='lastValueOfSim', + ), + ), + ], + ids=lambda x: x if isinstance(x, str) else '', +) +def test_parameter_deprecations(name, factory): + """Test all parameter deprecations include removal version message.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + factory() + assert len(w) > 0, f'No warning raised for {name}' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( + f'Missing removal version in {name}' + ) + + +# === Property deprecations === +@pytest.fixture(scope='module') +def deprecated_instances(): + """Create instances for property testing.""" + return { + 'data': fx.TimeSeriesData([1, 2, 3], aggregation_group=1), + 'boiler': Boiler( + 'b_prop', fuel_flow=fx.Flow('f_p', 'bus', 10), thermal_flow=fx.Flow('h_p', 'bus', 9), thermal_efficiency=0.9 + ), + 'invest_with_effects': fx.InvestParameters( + minimum_size=10, + maximum_size=100, + mandatory=False, + effects_of_investment={'costs': 
100}, + effects_of_investment_per_size={'costs': 10}, + effects_of_retirement={'costs': 50}, + piecewise_effects_of_investment=None, + ), + 'invest': fx.InvestParameters(minimum_size=10, maximum_size=100, mandatory=False), + 'onoff': fx.OnOffParameters( + on_hours_min=5, + on_hours_max=10, + switch_on_max=3, + ), + 'flow': fx.Flow('f_prop', bus='bus', size=10, flow_hours_min=5, flow_hours_max=20), + 'chp': CHP( + 'chp_prop', + fuel_flow=fx.Flow('f_chp', 'bus', 100), + electrical_flow=fx.Flow('e_chp', 'bus', 30), + thermal_flow=fx.Flow('h_chp', 'bus', 60), + thermal_efficiency=0.6, + electrical_efficiency=0.3, + ), + 'hp': HeatPump( + 'hp_prop', electrical_flow=fx.Flow('e_hp', 'bus', 10), thermal_flow=fx.Flow('h_hp', 'bus', 30), cop=3.0 + ), + 'hps': HeatPumpWithSource( + 'hps_prop', + electrical_flow=fx.Flow('e_hps', 'bus', 10), + heat_source_flow=fx.Flow('hs_hps', 'bus', 20), + thermal_flow=fx.Flow('h_hps', 'bus', 30), + cop=3.0, + ), + 'source': fx.Source('source_prop', outputs=[fx.Flow('out', 'bus', 10)]), + 'sink': fx.Sink('sink_prop', inputs=[fx.Flow('in', 'bus', 10)]), + 'storage': fx.Storage( + 'storage_prop', + charging=fx.Flow('charge', 'bus', 10), + discharging=fx.Flow('discharge', 'bus', 10), + capacity_in_flow_hours=10, + ), + 'effect': fx.Effect('effect_prop', unit='€', description='test'), + } + + +@pytest.mark.parametrize( + 'name,accessor', + [ + # TimeSeriesData properties + ('TimeSeriesData.agg_group', lambda objs: objs['data'].agg_group), + ('TimeSeriesData.agg_weight', lambda objs: objs['data'].agg_weight), + # InvestParameters properties + ('InvestParameters.optional', lambda objs: objs['invest'].optional), + ('InvestParameters.fix_effects', lambda objs: objs['invest_with_effects'].fix_effects), + ('InvestParameters.specific_effects', lambda objs: objs['invest_with_effects'].specific_effects), + ('InvestParameters.divest_effects', lambda objs: objs['invest_with_effects'].divest_effects), + ('InvestParameters.piecewise_effects', lambda objs: 
objs['invest_with_effects'].piecewise_effects), + # OnOffParameters properties + ('OnOffParameters.on_hours_total_min', lambda objs: objs['onoff'].on_hours_total_min), + ('OnOffParameters.on_hours_total_max', lambda objs: objs['onoff'].on_hours_total_max), + ('OnOffParameters.switch_on_total_max', lambda objs: objs['onoff'].switch_on_total_max), + # Flow properties + ('Flow.flow_hours_total_min', lambda objs: objs['flow'].flow_hours_total_min), + ('Flow.flow_hours_total_max', lambda objs: objs['flow'].flow_hours_total_max), + # Boiler properties + ('Boiler.eta', lambda objs: objs['boiler'].eta), + ('Boiler.Q_fu', lambda objs: objs['boiler'].Q_fu), + ('Boiler.Q_th', lambda objs: objs['boiler'].Q_th), + # CHP properties + ('CHP.eta_th', lambda objs: objs['chp'].eta_th), + ('CHP.eta_el', lambda objs: objs['chp'].eta_el), + ('CHP.Q_fu', lambda objs: objs['chp'].Q_fu), + ('CHP.P_el', lambda objs: objs['chp'].P_el), + ('CHP.Q_th', lambda objs: objs['chp'].Q_th), + # HeatPump properties + ('HeatPump.COP', lambda objs: objs['hp'].COP), + ('HeatPump.P_el', lambda objs: objs['hp'].P_el), + ('HeatPump.Q_th', lambda objs: objs['hp'].Q_th), + # HeatPumpWithSource properties + ('HeatPumpWithSource.COP', lambda objs: objs['hps'].COP), + ('HeatPumpWithSource.P_el', lambda objs: objs['hps'].P_el), + ('HeatPumpWithSource.Q_ab', lambda objs: objs['hps'].Q_ab), + ('HeatPumpWithSource.Q_th', lambda objs: objs['hps'].Q_th), + # Source properties + ('Source.source', lambda objs: objs['source'].source), + # Sink properties + ('Sink.sink', lambda objs: objs['sink'].sink), + # Effect property getters + ('Effect.minimum_total_per_period (getter)', lambda objs: objs['effect'].minimum_total_per_period), + ('Effect.maximum_total_per_period (getter)', lambda objs: objs['effect'].maximum_total_per_period), + ], + ids=lambda x: x if isinstance(x, str) else '', +) +def test_property_deprecations(name, accessor, deprecated_instances): + """Test all property deprecations include removal version 
message.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + accessor(deprecated_instances) + assert len(w) > 0, f'No warning raised for {name}' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( + f'Missing removal version in {name}' + ) + + +# === Property setter deprecations === +@pytest.mark.parametrize( + 'name,setter', + [ + # InvestParameters setter + ('InvestParameters.optional (setter)', lambda: setattr(fx.InvestParameters(minimum_size=10), 'optional', True)), + # OnOffParameters setters + ( + 'OnOffParameters.on_hours_total_min (setter)', + lambda: setattr(fx.OnOffParameters(), 'on_hours_total_min', 10), + ), + ( + 'OnOffParameters.on_hours_total_max (setter)', + lambda: setattr(fx.OnOffParameters(), 'on_hours_total_max', 20), + ), + ( + 'OnOffParameters.switch_on_total_max (setter)', + lambda: setattr(fx.OnOffParameters(), 'switch_on_total_max', 5), + ), + # Flow setters + ('Flow.flow_hours_total_min (setter)', lambda: setattr(fx.Flow('f', 'bus', 10), 'flow_hours_total_min', 5)), + ('Flow.flow_hours_total_max (setter)', lambda: setattr(fx.Flow('f', 'bus', 10), 'flow_hours_total_max', 20)), + # Effect setters + ('Effect.minimum_operation (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_operation', 100)), + ('Effect.maximum_operation (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_operation', 200)), + ('Effect.minimum_invest (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_invest', 50)), + ('Effect.maximum_invest (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_invest', 150)), + ( + 'Effect.minimum_operation_per_hour (setter)', + lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_operation_per_hour', 10), + ), + ( + 'Effect.maximum_operation_per_hour (setter)', + lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_operation_per_hour', 30), + ), + ( + 'Effect.minimum_total_per_period (setter)', + 
lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_total_per_period', 100), + ), + ( + 'Effect.maximum_total_per_period (setter)', + lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_total_per_period', 200), + ), + ], + ids=lambda x: x if isinstance(x, str) else '', +) +def test_property_setter_deprecations(name, setter): + """Test all property setter deprecations include removal version message.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + setter() + assert len(w) > 0, f'No warning raised for {name}' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( + f'Missing removal version in {name}' + ) + + +# === FlowSystem-specific deprecations === +def test_flowsystem_all_elements_property(): + """Test FlowSystem.all_elements property deprecation.""" + fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) + bus = fx.Bus('bus') + fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + _ = fs.all_elements + assert len(w) > 0, 'No warning raised for FlowSystem.all_elements' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +def test_flowsystem_weights_getter(): + """Test FlowSystem.weights getter deprecation.""" + fs = fx.FlowSystem( + timesteps=pd.date_range('2020-01-01', periods=10, freq='h'), scenarios=pd.Index(['A', 'B'], name='scenario') + ) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + _ = fs.weights + assert len(w) > 0, 'No warning raised for FlowSystem.weights getter' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +def test_flowsystem_weights_setter(): + """Test FlowSystem.weights setter deprecation.""" + fs = fx.FlowSystem( + timesteps=pd.date_range('2020-01-01', periods=10, 
freq='h'), + scenarios=pd.Index(['A', 'B', 'C'], name='scenario'), + ) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + fs.weights = np.array([1, 2, 3]) + assert len(w) > 0, 'No warning raised for FlowSystem.weights setter' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +# === Calculation deprecations === +def test_calculation_active_timesteps_parameter(): + """Test Calculation active_timesteps parameter deprecation.""" + fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) + bus = fx.Bus('bus') + fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + _ = fx.calculation.Calculation('test', fs, active_timesteps=pd.date_range('2020-01-01', periods=5, freq='h')) + assert len(w) > 0, 'No warning raised for Calculation active_timesteps parameter' + # Check that the active_timesteps deprecation warning is in the list (may not be first due to class-level warning) + messages = [str(warning.message) for warning in w] + assert any( + 'active_timesteps' in msg and f'will be removed in v{DEPRECATION_REMOVAL_VERSION}' in msg + for msg in messages + ) + + +def test_calculation_active_timesteps_property(): + """Test Calculation.active_timesteps property deprecation.""" + fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) + bus = fx.Bus('bus') + fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) + calc = fx.calculation.Calculation('test', fs) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + _ = calc.active_timesteps + assert len(w) > 0, 'No warning raised for Calculation.active_timesteps property' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +# === Config function deprecations === 
+def test_change_logging_level_function(): + """Test change_logging_level() function deprecation.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + change_logging_level('INFO') + assert len(w) > 0, 'No warning raised for change_logging_level()' + assert f'will be removed in version {DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +# === Results-related deprecations === +@pytest.fixture +def simple_results(): + """Create a simple calculation results object for testing.""" + # Create a minimal flow system + fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=5, freq='h')) + bus1 = fx.Bus('bus1') + source = fx.Source('source1', outputs=[fx.Flow('out', 'bus1', size=10, effects_per_flow_hour=1)]) + sink = fx.Sink('sink1', inputs=[fx.Flow('in', 'bus1', size=10)]) + fs.add_elements( + bus1, + fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True), + source, + sink, + ) + + # Create and solve calculation + calc = fx.FullCalculation('test', fs) + calc.do_modeling() + solver = fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=30) + calc.solve(solver) + + return calc.results + + +def test_results_flow_system_parameter(simple_results): + """Test CalculationResults flow_system parameter deprecation.""" + # Get the flow_system_data from existing results + fs_data = simple_results.flow_system_data + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + # Create new results with deprecated parameter + from flixopt.results import CalculationResults + + _ = CalculationResults( + solution=simple_results.solution, + flow_system_data=None, # Will be overridden by deprecated parameter + flow_system=fs_data, # deprecated parameter + name=simple_results.name, + summary=simple_results.summary, + folder=None, + ) + assert len(w) > 0, 'No warning raised for flow_system parameter' + # Check that the flow_system parameter deprecation warning is 
in the list (may not be first due to class-level warning) + messages = [str(warning.message) for warning in w] + assert any( + 'flow_system' in msg and f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in msg for msg in messages + ) + + +def test_results_plot_node_balance_indexer(simple_results): + """Test ComponentResults.plot_node_balance indexer parameter deprecation.""" + # Get actual time values from the results + time_coords = simple_results.solution.coords['time'] + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + simple_results['source1'].plot_node_balance( + indexer={'time': slice(time_coords[0].values, time_coords[2].values)}, show=False, save=False + ) + assert len(w) > 0, 'No warning raised for plot_node_balance indexer parameter' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +def test_plot_heatmap_function_heatmap_params(): + """Test plot_heatmap function heatmap_timeframes/heatmap_timesteps_per_frame parameters.""" + # Create simple test data - 7 days * 24 hours = 168 hours + data = xr.DataArray( + np.random.rand(168), + coords={'time': pd.date_range('2020-01-01', periods=168, freq='h')}, + dims=['time'], + ) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + plot_heatmap( + data, + name='test', + heatmap_timeframes='D', # Days + heatmap_timesteps_per_frame='h', # Hours + show=False, + save=False, + ) + assert len(w) > 0, 'No warning raised for heatmap_timeframes/heatmap_timesteps_per_frame' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +def test_plot_heatmap_function_color_map(): + """Test plot_heatmap function color_map parameter.""" + data = xr.DataArray( + np.random.rand(24), + coords={'time': pd.date_range('2020-01-01', periods=24, freq='h')}, + dims=['time'], + ) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', 
DeprecationWarning) + plot_heatmap(data, name='test', color_map='viridis', show=False, save=False) + assert len(w) > 0, 'No warning raised for color_map parameter' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) + + +def test_plot_heatmap_function_indexer(): + """Test plot_heatmap function indexer parameter.""" + time_index = pd.date_range('2020-01-01', periods=24, freq='h') + data = xr.DataArray( + np.random.rand(24), + coords={'time': time_index}, + dims=['time'], + ) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always', DeprecationWarning) + # Use actual datetime values for slicing + plot_heatmap(data, name='test', indexer={'time': slice(time_index[0], time_index[10])}, show=False, save=False) + assert len(w) > 0, 'No warning raised for indexer parameter' + assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) diff --git a/tests/test_effect.py b/tests/test_effect.py index 8293ec62f..33ce59f9e 100644 --- a/tests/test_effect.py +++ b/tests/test_effect.py @@ -7,8 +7,8 @@ assert_conequal, assert_sets_equal, assert_var_equal, - create_calculation_and_solve, create_linopy_model, + create_optimization_and_solve, ) @@ -257,7 +257,7 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config): ), ) - results = create_calculation_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 60), 'Sim1').results + results = create_optimization_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 60), 'Sim1').results effect_share_factors = { 'temporal': { @@ -340,3 +340,28 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config): results.effects_per_component['total'].sum('component').sel(effect='Effect3', drop=True), results.solution['Effect3'], ) + + +class TestPenaltyAsObjective: + """Test that Penalty cannot be set as the objective effect.""" + + def test_penalty_cannot_be_created_as_objective(self): + """Test that creating a Penalty effect with 
is_objective=True raises ValueError.""" + import pytest + + with pytest.raises(ValueError, match='Penalty.*cannot be set as the objective'): + fx.Effect('Penalty', '€', 'Test Penalty', is_objective=True) + + def test_penalty_cannot_be_set_as_objective_via_setter(self): + """Test that setting Penalty as objective via setter raises ValueError.""" + import pandas as pd + import pytest + + # Create a fresh flow system without pre-existing objective + flow_system = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) + penalty_effect = fx.Effect('Penalty', '€', 'Test Penalty', is_objective=False) + + flow_system.add_elements(penalty_effect) + + with pytest.raises(ValueError, match='Penalty.*cannot be set as the objective'): + flow_system.effects.objective_effect = penalty_effect diff --git a/tests/test_flow_system_resample.py b/tests/test_flow_system_resample.py index 551fcf483..9ddf4d5e4 100644 --- a/tests/test_flow_system_resample.py +++ b/tests/test_flow_system_resample.py @@ -206,7 +206,7 @@ def test_modeling(with_dim): ) fs_r = fs.resample('4h', method='mean') - calc = fx.FullCalculation('test', fs_r) + calc = fx.Optimization('test', fs_r) calc.do_modeling() assert calc.model is not None @@ -225,11 +225,11 @@ def test_model_structure_preserved(): fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]), ) - calc_orig = fx.FullCalculation('orig', fs) + calc_orig = fx.Optimization('orig', fs) calc_orig.do_modeling() fs_r = fs.resample('4h', method='mean') - calc_r = fx.FullCalculation('resamp', fs_r) + calc_r = fx.Optimization('resamp', fs_r) calc_r.do_modeling() # Same number of variable/constraint types diff --git a/tests/test_functional.py b/tests/test_functional.py index 98f118526..ae01a44f2 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -93,11 +93,11 @@ def flow_system_minimal(timesteps) -> fx.FlowSystem: return flow_system -def solve_and_load(flow_system: 
fx.FlowSystem, solver) -> fx.results.CalculationResults: - calculation = fx.FullCalculation('Calculation', flow_system) - calculation.do_modeling() - calculation.solve(solver) - return calculation.results +def solve_and_load(flow_system: fx.FlowSystem, solver) -> fx.results.Results: + optimization = fx.Optimization('Calculation', flow_system) + optimization.do_modeling() + optimization.solve(solver) + return optimization.results @pytest.fixture diff --git a/tests/test_integration.py b/tests/test_integration.py index 88e4a21af..6ac1e0467 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -1,11 +1,10 @@ -import numpy as np import pytest import flixopt as fx from .conftest import ( assert_almost_equal_numeric, - create_calculation_and_solve, + create_optimization_and_solve, ) @@ -14,9 +13,9 @@ def test_simple_flow_system(self, simple_flow_system, highs_solver): """ Test the effects of the simple energy system model """ - calculation = create_calculation_and_solve(simple_flow_system, highs_solver, 'test_simple_flow_system') + optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_simple_flow_system') - effects = calculation.flow_system.effects + effects = optimization.flow_system.effects # Cost assertions assert_almost_equal_numeric( @@ -32,8 +31,8 @@ def test_model_components(self, simple_flow_system, highs_solver): """ Test the component flows of the simple energy system model """ - calculation = create_calculation_and_solve(simple_flow_system, highs_solver, 'test_model_components') - comps = calculation.flow_system.components + optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components') + comps = optimization.flow_system.components # Boiler assertions assert_almost_equal_numeric( @@ -54,12 +53,12 @@ def test_results_persistence(self, simple_flow_system, highs_solver): Test saving and loading results """ # Save results to file - calculation = 
create_calculation_and_solve(simple_flow_system, highs_solver, 'test_model_components') + optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components') - calculation.results.to_file() + optimization.results.to_file() # Load results from file - results = fx.results.CalculationResults.from_file(calculation.folder, calculation.name) + results = fx.results.Results.from_file(optimization.folder, optimization.name) # Verify key variables from loaded results assert_almost_equal_numeric( @@ -72,17 +71,17 @@ def test_results_persistence(self, simple_flow_system, highs_solver): class TestComplex: def test_basic_flow_system(self, flow_system_base, highs_solver): - calculation = create_calculation_and_solve(flow_system_base, highs_solver, 'test_basic_flow_system') + optimization = create_optimization_and_solve(flow_system_base, highs_solver, 'test_basic_flow_system') # Assertions assert_almost_equal_numeric( - calculation.results.model['costs'].solution.item(), + optimization.results.model['costs'].solution.item(), -11597.873624489237, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['costs(temporal)|per_timestep'].solution.values, + optimization.results.model['costs(temporal)|per_timestep'].solution.values, [ -2.38500000e03, -2.21681333e03, @@ -98,66 +97,66 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): ) assert_almost_equal_numeric( - sum(calculation.results.model['CO2(temporal)->costs(temporal)'].solution.values), + sum(optimization.results.model['CO2(temporal)->costs(temporal)'].solution.values), 258.63729669618675, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Kessel(Q_th)->costs(temporal)'].solution.values), + sum(optimization.results.model['Kessel(Q_th)->costs(temporal)'].solution.values), 0.01, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - 
sum(calculation.results.model['Kessel->costs(temporal)'].solution.values), + sum(optimization.results.model['Kessel->costs(temporal)'].solution.values), -0.0, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Gastarif(Q_Gas)->costs(temporal)'].solution.values), + sum(optimization.results.model['Gastarif(Q_Gas)->costs(temporal)'].solution.values), 39.09153113079115, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['Einspeisung(P_el)->costs(temporal)'].solution.values), + sum(optimization.results.model['Einspeisung(P_el)->costs(temporal)'].solution.values), -14196.61245231646, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - sum(calculation.results.model['KWK->costs(temporal)'].solution.values), + sum(optimization.results.model['KWK->costs(temporal)'].solution.values), 0.0, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Kessel(Q_th)->costs(periodic)'].solution.values, + optimization.results.model['Kessel(Q_th)->costs(periodic)'].solution.values, 1000 + 500, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Speicher->costs(periodic)'].solution.values, + optimization.results.model['Speicher->costs(periodic)'].solution.values, 800 + 1, 'costs doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['CO2(temporal)'].solution.values, + optimization.results.model['CO2(temporal)'].solution.values, 1293.1864834809337, 'CO2 doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['CO2(periodic)'].solution.values, + optimization.results.model['CO2(periodic)'].solution.values, 0.9999999999999994, 'CO2 doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Kessel(Q_th)|flow_rate'].solution.values, + 
optimization.results.model['Kessel(Q_th)|flow_rate'].solution.values, [0, 0, 0, 45, 0, 0, 0, 0, 0], 'Kessel doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['KWK(Q_th)|flow_rate'].solution.values, + optimization.results.model['KWK(Q_th)|flow_rate'].solution.values, [ 7.50000000e01, 6.97111111e01, @@ -172,7 +171,7 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): 'KWK Q_th doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['KWK(P_el)|flow_rate'].solution.values, + optimization.results.model['KWK(P_el)|flow_rate'].solution.values, [ 6.00000000e01, 5.57688889e01, @@ -188,29 +187,29 @@ def test_basic_flow_system(self, flow_system_base, highs_solver): ) assert_almost_equal_numeric( - calculation.results.model['Speicher|netto_discharge'].solution.values, + optimization.results.model['Speicher|netto_discharge'].solution.values, [-45.0, -69.71111111, 15.0, -10.0, 36.06697198, -55.0, 20.0, 20.0, 20.0], 'Speicher nettoFlow doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Speicher|charge_state'].solution.values, + optimization.results.model['Speicher|charge_state'].solution.values, [0.0, 40.5, 100.0, 77.0, 79.84, 37.38582802, 83.89496178, 57.18336484, 32.60869565, 10.0], 'Speicher nettoFlow doesnt match expected value', ) assert_almost_equal_numeric( - calculation.results.model['Speicher|PiecewiseEffects|costs'].solution.values, + optimization.results.model['Speicher|PiecewiseEffects|costs'].solution.values, 800, 'Speicher|PiecewiseEffects|costs doesnt match expected value', ) def test_piecewise_conversion(self, flow_system_piecewise_conversion, highs_solver): - calculation = create_calculation_and_solve( + optimization = create_optimization_and_solve( flow_system_piecewise_conversion, highs_solver, 'test_piecewise_conversion' ) - effects = calculation.flow_system.effects - comps = calculation.flow_system.components + effects = 
optimization.flow_system.effects + comps = optimization.flow_system.components # Compare expected values with actual values assert_almost_equal_numeric( @@ -254,7 +253,7 @@ class TestModelingTypes: @pytest.fixture(params=['full', 'segmented', 'aggregated']) def modeling_calculation(self, request, flow_system_long, highs_solver): """ - Fixture to run calculations with different modeling types + Fixture to run optimizations with different modeling types """ # Extract flow system and data from the fixture flow_system = flow_system_long[0] @@ -264,7 +263,7 @@ def modeling_calculation(self, request, flow_system_long, highs_solver): # Create calculation based on modeling type modeling_type = request.param if modeling_type == 'full': - calc = fx.FullCalculation('fullModel', flow_system) + calc = fx.Optimization('fullModel', flow_system) calc.do_modeling() calc.solve(highs_solver) elif modeling_type == 'segmented': @@ -319,7 +318,7 @@ def test_segmented_io(self, modeling_calculation): calc, modeling_type = modeling_calculation if modeling_type == 'segmented': calc.results.to_file() - _ = fx.results.SegmentedCalculationResults.from_file(calc.folder, calc.name) + _ = fx.results.SegmentedResults.from_file(calc.folder, calc.name) if __name__ == '__main__': diff --git a/tests/test_io.py b/tests/test_io.py index 6d225734e..9f54799b8 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -1,10 +1,9 @@ import uuid -import numpy as np import pytest import flixopt as fx -from flixopt.io import CalculationResultsPaths +from flixopt.io import ResultsPaths from .conftest import ( assert_almost_equal_numeric, @@ -40,16 +39,16 @@ def test_flow_system_file_io(flow_system, highs_solver, request): worker_id = getattr(request.config, 'workerinput', {}).get('workerid', 'main') test_id = f'{worker_id}-{unique_id}' - calculation_0 = fx.FullCalculation(f'IO-{test_id}', flow_system=flow_system) + calculation_0 = fx.Optimization(f'IO-{test_id}', flow_system=flow_system) 
calculation_0.do_modeling() calculation_0.solve(highs_solver) calculation_0.flow_system.plot_network() calculation_0.results.to_file() - paths = CalculationResultsPaths(calculation_0.folder, calculation_0.name) + paths = ResultsPaths(calculation_0.folder, calculation_0.name) flow_system_1 = fx.FlowSystem.from_netcdf(paths.flow_system) - calculation_1 = fx.FullCalculation(f'Loaded_IO-{test_id}', flow_system=flow_system_1) + calculation_1 = fx.Optimization(f'Loaded_IO-{test_id}', flow_system=flow_system_1) calculation_1.do_modeling() calculation_1.solve(highs_solver) calculation_1.flow_system.plot_network() @@ -83,7 +82,6 @@ def test_flow_system_io(flow_system): def test_suppress_output_file_descriptors(tmp_path): """Test that suppress_output() redirects file descriptors to /dev/null.""" import os - import sys from flixopt.io import suppress_output diff --git a/tests/test_overwrite_protection.py b/tests/test_overwrite_protection.py new file mode 100644 index 000000000..4651f1a68 --- /dev/null +++ b/tests/test_overwrite_protection.py @@ -0,0 +1,64 @@ +"""Tests for Results.to_file() overwrite protection.""" + +import pathlib +import tempfile + +import pytest + +import flixopt as fx + + +def test_results_overwrite_protection(simple_flow_system, highs_solver): + """Test that Results.to_file() prevents accidental overwriting.""" + with tempfile.TemporaryDirectory() as tmpdir: + test_folder = pathlib.Path(tmpdir) / 'results' + + # Run optimization + opt = fx.Optimization('test_results', simple_flow_system, folder=test_folder) + opt.do_modeling() + opt.solve(highs_solver) + + # First save should succeed + opt.results.to_file(compression=0, document_model=False, save_linopy_model=False) + + # Second save without overwrite should fail + with pytest.raises(FileExistsError, match='Results files already exist'): + opt.results.to_file(compression=0, document_model=False, save_linopy_model=False) + + # Third save with overwrite should succeed + opt.results.to_file(compression=0, 
document_model=False, save_linopy_model=False, overwrite=True) + + +def test_results_overwrite_to_different_folder(simple_flow_system, highs_solver): + """Test that saving to different folder works without overwrite flag.""" + with tempfile.TemporaryDirectory() as tmpdir: + test_folder1 = pathlib.Path(tmpdir) / 'results1' + test_folder2 = pathlib.Path(tmpdir) / 'results2' + + # Run optimization + opt = fx.Optimization('test_results', simple_flow_system, folder=test_folder1) + opt.do_modeling() + opt.solve(highs_solver) + + # Save to first folder + opt.results.to_file(compression=0, document_model=False, save_linopy_model=False) + + # Save to different folder should work without overwrite flag + opt.results.to_file(folder=test_folder2, compression=0, document_model=False, save_linopy_model=False) + + +def test_results_overwrite_with_different_name(simple_flow_system, highs_solver): + """Test that saving with different name works without overwrite flag.""" + with tempfile.TemporaryDirectory() as tmpdir: + test_folder = pathlib.Path(tmpdir) / 'results' + + # Run optimization + opt = fx.Optimization('test_results', simple_flow_system, folder=test_folder) + opt.do_modeling() + opt.solve(highs_solver) + + # Save with first name + opt.results.to_file(compression=0, document_model=False, save_linopy_model=False) + + # Save with different name should work without overwrite flag + opt.results.to_file(name='test_results_v2', compression=0, document_model=False, save_linopy_model=False) diff --git a/tests/test_results_plots.py b/tests/test_results_plots.py index a656f7c44..f68f5ec07 100644 --- a/tests/test_results_plots.py +++ b/tests/test_results_plots.py @@ -3,7 +3,7 @@ import flixopt as fx -from .conftest import create_calculation_and_solve, simple_flow_system +from .conftest import create_optimization_and_solve, simple_flow_system @pytest.fixture(params=[True, False]) @@ -43,8 +43,8 @@ def color_spec(request): @pytest.mark.slow def test_results_plots(flow_system, 
plotting_engine, show, save, color_spec): - calculation = create_calculation_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 30), 'test_results_plots') - results = calculation.results + optimization = create_optimization_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 30), 'test_results_plots') + results = optimization.results results['Boiler'].plot_node_balance(engine=plotting_engine, save=save, show=show, colors=color_spec) @@ -78,8 +78,8 @@ def test_results_plots(flow_system, plotting_engine, show, save, color_spec): @pytest.mark.slow def test_color_handling_edge_cases(flow_system, plotting_engine, show, save): """Test edge cases for color handling""" - calculation = create_calculation_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 30), 'test_color_edge_cases') - results = calculation.results + optimization = create_optimization_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 30), 'test_color_edge_cases') + results = optimization.results # Test with empty color list (should fall back to default) results['Boiler'].plot_node_balance(engine=plotting_engine, save=save, show=show, colors=[]) diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index 91c9513d6..bd402cb8c 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -1,3 +1,5 @@ +import tempfile + import numpy as np import pandas as pd import pytest @@ -9,7 +11,7 @@ from flixopt.elements import Bus, Flow from flixopt.flow_system import FlowSystem -from .conftest import create_calculation_and_solve, create_linopy_model +from .conftest import create_linopy_model, create_optimization_and_solve @pytest.fixture @@ -249,8 +251,11 @@ def test_weights(flow_system_piecewise_conversion_scenarios): model = create_linopy_model(flow_system_piecewise_conversion_scenarios) normalized_weights = scenario_weights / sum(scenario_weights) np.testing.assert_allclose(model.objective_weights.values, normalized_weights) + # Penalty is now an effect with temporal and periodic components + 
penalty_total = flow_system_piecewise_conversion_scenarios.effects.penalty_effect.submodel.total assert_linequal( - model.objective.expression, (model.variables['costs'] * normalized_weights).sum() + model.variables['Penalty'] + model.objective.expression, + (model.variables['costs'] * normalized_weights).sum() + (penalty_total * normalized_weights).sum(), ) assert np.isclose(model.objective_weights.sum().item(), 1) @@ -269,9 +274,12 @@ def test_weights_io(flow_system_piecewise_conversion_scenarios): model = create_linopy_model(flow_system_piecewise_conversion_scenarios) np.testing.assert_allclose(model.objective_weights.values, normalized_scenario_weights_da) + # Penalty is now an effect with temporal and periodic components + penalty_total = flow_system_piecewise_conversion_scenarios.effects.penalty_effect.submodel.total assert_linequal( model.objective.expression, - (model.variables['costs'] * normalized_scenario_weights_da).sum() + model.variables['Penalty'], + (model.variables['costs'] * normalized_scenario_weights_da).sum() + + (penalty_total * normalized_scenario_weights_da).sum(), ) assert np.isclose(model.objective_weights.sum().item(), 1.0) @@ -288,19 +296,19 @@ def test_full_scenario_optimization(flow_system_piecewise_conversion_scenarios): scenarios = flow_system_piecewise_conversion_scenarios.scenarios weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios))) flow_system_piecewise_conversion_scenarios.scenario_weights = weights - calc = create_calculation_and_solve( + calc = create_optimization_and_solve( flow_system_piecewise_conversion_scenarios, solver=fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60), name='test_full_scenario', ) calc.results.to_file() - res = fx.results.CalculationResults.from_file('results', 'test_full_scenario') + res = fx.results.Results.from_file('results', 'test_full_scenario') fx.FlowSystem.from_dataset(res.flow_system_data) - calc = create_calculation_and_solve( + _ = 
create_optimization_and_solve( flow_system_piecewise_conversion_scenarios, solver=fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60), - name='test_full_scenario', + name='test_full_scenario_2', ) @@ -310,19 +318,19 @@ def test_io_persistence(flow_system_piecewise_conversion_scenarios): scenarios = flow_system_piecewise_conversion_scenarios.scenarios weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios))) flow_system_piecewise_conversion_scenarios.scenario_weights = weights - calc = create_calculation_and_solve( + calc = create_optimization_and_solve( flow_system_piecewise_conversion_scenarios, solver=fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60), - name='test_full_scenario', + name='test_io_persistence', ) calc.results.to_file() - res = fx.results.CalculationResults.from_file('results', 'test_full_scenario') + res = fx.results.Results.from_file('results', 'test_io_persistence') flow_system_2 = fx.FlowSystem.from_dataset(res.flow_system_data) - calc_2 = create_calculation_and_solve( + calc_2 = create_optimization_and_solve( flow_system_2, solver=fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60), - name='test_full_scenario_2', + name='test_io_persistence_2', ) np.testing.assert_allclose(calc.results.objective, calc_2.results.objective, rtol=0.001) @@ -339,15 +347,19 @@ def test_scenarios_selection(flow_system_piecewise_conversion_scenarios): np.testing.assert_allclose(flow_system.weights.values, flow_system_full.weights[0:2]) - calc = fx.FullCalculation(flow_system=flow_system, name='test_full_scenario', normalize_weights=False) + calc = fx.Optimization(flow_system=flow_system, name='test_scenarios_selection', normalize_weights=False) calc.do_modeling() calc.solve(fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60)) calc.results.to_file() + # Penalty has same structure as other effects: 'Penalty' is the total, 'Penalty(temporal)' and 'Penalty(periodic)' are components 
np.testing.assert_allclose( calc.results.objective, - ((calc.results.solution['costs'] * flow_system.weights).sum() + calc.results.solution['Penalty']).item(), + ( + (calc.results.solution['costs'] * flow_system.weights).sum() + + (calc.results.solution['Penalty'] * flow_system.weights).sum() + ).item(), ) ## Account for rounding errors assert calc.results.solution.indexes['scenario'].equals(flow_system_full.scenarios[0:2]) @@ -484,7 +496,7 @@ def test_size_equality_constraints(): fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True)) - calc = fx.FullCalculation('test', fs) + calc = fx.Optimization('test', fs) calc.do_modeling() # Check that size equality constraint exists @@ -524,7 +536,7 @@ def test_flow_rate_equality_constraints(): fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True)) - calc = fx.FullCalculation('test', fs) + calc = fx.Optimization('test', fs) calc.do_modeling() # Check that flow_rate equality constraint exists @@ -566,7 +578,7 @@ def test_selective_scenario_independence(): fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True)) - calc = fx.FullCalculation('test', fs) + calc = fx.Optimization('test', fs) calc.do_modeling() constraint_names = [str(c) for c in calc.model.constraints] @@ -594,8 +606,6 @@ def test_selective_scenario_independence(): def test_scenario_parameters_io_persistence(): """Test that scenario_independent_sizes and scenario_independent_flow_rates persist through IO operations.""" - import shutil - import tempfile timesteps = pd.date_range('2023-01-01', periods=24, freq='h') scenarios = pd.Index(['base', 'high'], name='scenario') @@ -639,7 +649,6 @@ def test_scenario_parameters_io_persistence(): def test_scenario_parameters_io_with_calculation(): """Test that scenario parameters persist through full calculation IO.""" import shutil - import tempfile timesteps = pd.date_range('2023-01-01', periods=24, freq='h') scenarios = 
pd.Index(['base', 'high'], name='scenario') @@ -676,13 +685,13 @@ def test_scenario_parameters_io_with_calculation(): try: # Solve and save - calc = fx.FullCalculation('test_io', fs, folder=temp_dir) + calc = fx.Optimization('test_io', fs, folder=temp_dir) calc.do_modeling() calc.solve(fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=60)) calc.results.to_file() # Load results - results = fx.results.CalculationResults.from_file(temp_dir, 'test_io') + results = fx.results.Results.from_file(temp_dir, 'test_io') fs_loaded = fx.FlowSystem.from_dataset(results.flow_system_data) # Verify parameters persisted @@ -690,7 +699,7 @@ def test_scenario_parameters_io_with_calculation(): assert fs_loaded.scenario_independent_flow_rates == fs.scenario_independent_flow_rates # Verify constraints are recreated correctly - calc2 = fx.FullCalculation('test_io_2', fs_loaded, folder=temp_dir) + calc2 = fx.Optimization('test_io_2', fs_loaded, folder=temp_dir) calc2.do_modeling() constraint_names1 = [str(c) for c in calc.model.constraints] From 4ca07f660a589871a2df7dd3d9ff5518c1736f9a Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 02:00:13 +0100 Subject: [PATCH 05/49] Merge branch 'main' into feature/v5 # Conflicts: # flixopt/core.py # flixopt/effects.py # test_deprecations.py # tests/test_invest_parameters_deprecation.py --- tests/test_deprecations.py | 613 ------------------------------------- 1 file changed, 613 deletions(-) delete mode 100644 tests/test_deprecations.py diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py deleted file mode 100644 index c77d794a5..000000000 --- a/tests/test_deprecations.py +++ /dev/null @@ -1,613 +0,0 @@ -"""Comprehensive pytest-based test for all deprecation warnings with v5.0.0 removal message.""" - -import warnings - -import numpy as np -import pandas as pd -import pytest -import xarray as xr - -import flixopt as fx -from flixopt.config import 
DEPRECATION_REMOVAL_VERSION, change_logging_level -from flixopt.linear_converters import CHP, Boiler, HeatPump, HeatPumpWithSource, Power2Heat -from flixopt.results import plot_heatmap - - -# === Parameter deprecations (via _handle_deprecated_kwarg) === -@pytest.mark.parametrize( - 'name,factory', - [ - ("Source 'source'", lambda: fx.Source('s1', source=fx.Flow('out1', bus='bus', size=10))), - ("Sink 'sink'", lambda: fx.Sink('sink1', sink=fx.Flow('in2', bus='bus', size=10))), - ("InvestParameters 'fix_effects'", lambda: fx.InvestParameters(minimum_size=10, fix_effects={'costs': 100})), - ( - "InvestParameters 'specific_effects'", - lambda: fx.InvestParameters(minimum_size=10, specific_effects={'costs': 10}), - ), - ( - "InvestParameters 'divest_effects'", - lambda: fx.InvestParameters(minimum_size=10, divest_effects={'costs': 50}), - ), - ( - "InvestParameters 'piecewise_effects'", - lambda: fx.InvestParameters(minimum_size=10, piecewise_effects=[]), - ), - ("InvestParameters 'optional'", lambda: fx.InvestParameters(minimum_size=10, optional=True)), - ("OnOffParameters 'on_hours_total_min'", lambda: fx.OnOffParameters(on_hours_total_min=10)), - ("OnOffParameters 'on_hours_total_max'", lambda: fx.OnOffParameters(on_hours_total_max=20)), - ("OnOffParameters 'switch_on_total_max'", lambda: fx.OnOffParameters(switch_on_total_max=5)), - ("Flow 'flow_hours_total_min'", lambda: fx.Flow('f1', bus='bus', size=10, flow_hours_total_min=5)), - ("Flow 'flow_hours_total_max'", lambda: fx.Flow('f2', bus='bus', size=10, flow_hours_total_max=20)), - ( - "Flow 'flow_hours_per_period_min'", - lambda: fx.Flow('f3', bus='bus', size=10, flow_hours_per_period_min=5), - ), - ( - "Flow 'flow_hours_per_period_max'", - lambda: fx.Flow('f4', bus='bus', size=10, flow_hours_per_period_max=20), - ), - ("Flow 'total_flow_hours_min'", lambda: fx.Flow('f5', bus='bus', size=10, total_flow_hours_min=5)), - ("Flow 'total_flow_hours_max'", lambda: fx.Flow('f6', bus='bus', size=10, 
total_flow_hours_max=20)), - ( - "Effect 'minimum_operation'", - lambda: fx.Effect('e1', unit='€', description='test', minimum_operation=100), - ), - ( - "Effect 'maximum_operation'", - lambda: fx.Effect('e2', unit='€', description='test', maximum_operation=200), - ), - ("Effect 'minimum_invest'", lambda: fx.Effect('e3', unit='€', description='test', minimum_invest=50)), - ("Effect 'maximum_invest'", lambda: fx.Effect('e4', unit='€', description='test', maximum_invest=150)), - ( - "Effect 'minimum_operation_per_hour'", - lambda: fx.Effect('e5', unit='€', description='test', minimum_operation_per_hour=10), - ), - ( - "Effect 'maximum_operation_per_hour'", - lambda: fx.Effect('e6', unit='€', description='test', maximum_operation_per_hour=30), - ), - # Linear converters - ( - "Boiler 'Q_fu'", - lambda: Boiler( - 'b1', Q_fu=fx.Flow('f1', 'bus', 10), thermal_flow=fx.Flow('h1', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Boiler 'Q_th'", - lambda: Boiler( - 'b2', fuel_flow=fx.Flow('f2', 'bus', 10), Q_th=fx.Flow('h2', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Boiler 'eta'", - lambda: Boiler('b3', fuel_flow=fx.Flow('f3', 'bus', 10), thermal_flow=fx.Flow('h3', 'bus', 9), eta=0.9), - ), - ( - "Power2Heat 'P_el'", - lambda: Power2Heat( - 'p1', P_el=fx.Flow('e1', 'bus', 10), thermal_flow=fx.Flow('h4', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Power2Heat 'Q_th'", - lambda: Power2Heat( - 'p2', electrical_flow=fx.Flow('e2', 'bus', 10), Q_th=fx.Flow('h5', 'bus', 9), thermal_efficiency=0.9 - ), - ), - ( - "Power2Heat 'eta'", - lambda: Power2Heat( - 'p3', electrical_flow=fx.Flow('e3', 'bus', 10), thermal_flow=fx.Flow('h6', 'bus', 9), eta=0.9 - ), - ), - ( - "HeatPump 'P_el'", - lambda: HeatPump('hp1', P_el=fx.Flow('e4', 'bus', 10), thermal_flow=fx.Flow('h7', 'bus', 30), cop=3.0), - ), - ( - "HeatPump 'Q_th'", - lambda: HeatPump('hp2', electrical_flow=fx.Flow('e5', 'bus', 10), Q_th=fx.Flow('h8', 'bus', 30), cop=3.0), - ), - ( - "HeatPump 'COP'", - lambda: 
HeatPump( - 'hp3', electrical_flow=fx.Flow('e6', 'bus', 10), thermal_flow=fx.Flow('h9', 'bus', 30), COP=3.0 - ), - ), - ( - "CHP 'Q_fu'", - lambda: CHP( - 'chp1', - Q_fu=fx.Flow('f4', 'bus', 100), - electrical_flow=fx.Flow('e7', 'bus', 30), - thermal_flow=fx.Flow('h10', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'P_el'", - lambda: CHP( - 'chp2', - fuel_flow=fx.Flow('f5', 'bus', 100), - P_el=fx.Flow('e8', 'bus', 30), - thermal_flow=fx.Flow('h11', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'Q_th'", - lambda: CHP( - 'chp3', - fuel_flow=fx.Flow('f6', 'bus', 100), - electrical_flow=fx.Flow('e9', 'bus', 30), - Q_th=fx.Flow('h12', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'eta_th'", - lambda: CHP( - 'chp4', - fuel_flow=fx.Flow('f7', 'bus', 100), - electrical_flow=fx.Flow('e10', 'bus', 30), - thermal_flow=fx.Flow('h13', 'bus', 60), - eta_th=0.6, - electrical_efficiency=0.3, - ), - ), - ( - "CHP 'eta_el'", - lambda: CHP( - 'chp5', - fuel_flow=fx.Flow('f8', 'bus', 100), - electrical_flow=fx.Flow('e11', 'bus', 30), - thermal_flow=fx.Flow('h14', 'bus', 60), - thermal_efficiency=0.6, - eta_el=0.3, - ), - ), - ( - "HeatPumpWithSource 'COP'", - lambda: HeatPumpWithSource( - 'hps1', - electrical_flow=fx.Flow('e12', 'bus', 10), - heat_source_flow=fx.Flow('hs1', 'bus', 20), - thermal_flow=fx.Flow('h15', 'bus', 30), - COP=3.0, - ), - ), - ( - "HeatPumpWithSource 'P_el'", - lambda: HeatPumpWithSource( - 'hps2', - P_el=fx.Flow('e13', 'bus', 10), - heat_source_flow=fx.Flow('hs2', 'bus', 20), - thermal_flow=fx.Flow('h16', 'bus', 30), - cop=3.0, - ), - ), - ( - "HeatPumpWithSource 'Q_ab'", - lambda: HeatPumpWithSource( - 'hps3', - electrical_flow=fx.Flow('e14', 'bus', 10), - Q_ab=fx.Flow('hs3', 'bus', 20), - thermal_flow=fx.Flow('h17', 'bus', 30), - cop=3.0, - ), - ), - ( - "HeatPumpWithSource 'Q_th'", - lambda: HeatPumpWithSource( - 'hps4', - 
electrical_flow=fx.Flow('e15', 'bus', 10), - heat_source_flow=fx.Flow('hs4', 'bus', 20), - Q_th=fx.Flow('h18', 'bus', 30), - cop=3.0, - ), - ), - # TimeSeriesData parameters - ("TimeSeriesData 'agg_group'", lambda: fx.TimeSeriesData([1, 2, 3], agg_group=1)), - ("TimeSeriesData 'agg_weight'", lambda: fx.TimeSeriesData([1, 2, 3], agg_weight=2.5)), - # Storage parameter - ( - "Storage 'initial_charge_state=lastValueOfSim'", - lambda: fx.Storage( - 'stor1', - charging=fx.Flow('charge', 'bus', 10), - discharging=fx.Flow('discharge', 'bus', 10), - capacity_in_flow_hours=10, - initial_charge_state='lastValueOfSim', - ), - ), - ], - ids=lambda x: x if isinstance(x, str) else '', -) -def test_parameter_deprecations(name, factory): - """Test all parameter deprecations include removal version message.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - factory() - assert len(w) > 0, f'No warning raised for {name}' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( - f'Missing removal version in {name}' - ) - - -# === Property deprecations === -@pytest.fixture(scope='module') -def deprecated_instances(): - """Create instances for property testing.""" - return { - 'data': fx.TimeSeriesData([1, 2, 3], aggregation_group=1), - 'boiler': Boiler( - 'b_prop', fuel_flow=fx.Flow('f_p', 'bus', 10), thermal_flow=fx.Flow('h_p', 'bus', 9), thermal_efficiency=0.9 - ), - 'invest_with_effects': fx.InvestParameters( - minimum_size=10, - maximum_size=100, - mandatory=False, - effects_of_investment={'costs': 100}, - effects_of_investment_per_size={'costs': 10}, - effects_of_retirement={'costs': 50}, - piecewise_effects_of_investment=None, - ), - 'invest': fx.InvestParameters(minimum_size=10, maximum_size=100, mandatory=False), - 'onoff': fx.OnOffParameters( - on_hours_min=5, - on_hours_max=10, - switch_on_max=3, - ), - 'flow': fx.Flow('f_prop', bus='bus', size=10, flow_hours_min=5, flow_hours_max=20), - 
'chp': CHP( - 'chp_prop', - fuel_flow=fx.Flow('f_chp', 'bus', 100), - electrical_flow=fx.Flow('e_chp', 'bus', 30), - thermal_flow=fx.Flow('h_chp', 'bus', 60), - thermal_efficiency=0.6, - electrical_efficiency=0.3, - ), - 'hp': HeatPump( - 'hp_prop', electrical_flow=fx.Flow('e_hp', 'bus', 10), thermal_flow=fx.Flow('h_hp', 'bus', 30), cop=3.0 - ), - 'hps': HeatPumpWithSource( - 'hps_prop', - electrical_flow=fx.Flow('e_hps', 'bus', 10), - heat_source_flow=fx.Flow('hs_hps', 'bus', 20), - thermal_flow=fx.Flow('h_hps', 'bus', 30), - cop=3.0, - ), - 'source': fx.Source('source_prop', outputs=[fx.Flow('out', 'bus', 10)]), - 'sink': fx.Sink('sink_prop', inputs=[fx.Flow('in', 'bus', 10)]), - 'storage': fx.Storage( - 'storage_prop', - charging=fx.Flow('charge', 'bus', 10), - discharging=fx.Flow('discharge', 'bus', 10), - capacity_in_flow_hours=10, - ), - 'effect': fx.Effect('effect_prop', unit='€', description='test'), - } - - -@pytest.mark.parametrize( - 'name,accessor', - [ - # TimeSeriesData properties - ('TimeSeriesData.agg_group', lambda objs: objs['data'].agg_group), - ('TimeSeriesData.agg_weight', lambda objs: objs['data'].agg_weight), - # InvestParameters properties - ('InvestParameters.optional', lambda objs: objs['invest'].optional), - ('InvestParameters.fix_effects', lambda objs: objs['invest_with_effects'].fix_effects), - ('InvestParameters.specific_effects', lambda objs: objs['invest_with_effects'].specific_effects), - ('InvestParameters.divest_effects', lambda objs: objs['invest_with_effects'].divest_effects), - ('InvestParameters.piecewise_effects', lambda objs: objs['invest_with_effects'].piecewise_effects), - # OnOffParameters properties - ('OnOffParameters.on_hours_total_min', lambda objs: objs['onoff'].on_hours_total_min), - ('OnOffParameters.on_hours_total_max', lambda objs: objs['onoff'].on_hours_total_max), - ('OnOffParameters.switch_on_total_max', lambda objs: objs['onoff'].switch_on_total_max), - # Flow properties - ('Flow.flow_hours_total_min', lambda 
objs: objs['flow'].flow_hours_total_min), - ('Flow.flow_hours_total_max', lambda objs: objs['flow'].flow_hours_total_max), - # Boiler properties - ('Boiler.eta', lambda objs: objs['boiler'].eta), - ('Boiler.Q_fu', lambda objs: objs['boiler'].Q_fu), - ('Boiler.Q_th', lambda objs: objs['boiler'].Q_th), - # CHP properties - ('CHP.eta_th', lambda objs: objs['chp'].eta_th), - ('CHP.eta_el', lambda objs: objs['chp'].eta_el), - ('CHP.Q_fu', lambda objs: objs['chp'].Q_fu), - ('CHP.P_el', lambda objs: objs['chp'].P_el), - ('CHP.Q_th', lambda objs: objs['chp'].Q_th), - # HeatPump properties - ('HeatPump.COP', lambda objs: objs['hp'].COP), - ('HeatPump.P_el', lambda objs: objs['hp'].P_el), - ('HeatPump.Q_th', lambda objs: objs['hp'].Q_th), - # HeatPumpWithSource properties - ('HeatPumpWithSource.COP', lambda objs: objs['hps'].COP), - ('HeatPumpWithSource.P_el', lambda objs: objs['hps'].P_el), - ('HeatPumpWithSource.Q_ab', lambda objs: objs['hps'].Q_ab), - ('HeatPumpWithSource.Q_th', lambda objs: objs['hps'].Q_th), - # Source properties - ('Source.source', lambda objs: objs['source'].source), - # Sink properties - ('Sink.sink', lambda objs: objs['sink'].sink), - # Effect property getters - ('Effect.minimum_total_per_period (getter)', lambda objs: objs['effect'].minimum_total_per_period), - ('Effect.maximum_total_per_period (getter)', lambda objs: objs['effect'].maximum_total_per_period), - ], - ids=lambda x: x if isinstance(x, str) else '', -) -def test_property_deprecations(name, accessor, deprecated_instances): - """Test all property deprecations include removal version message.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - accessor(deprecated_instances) - assert len(w) > 0, f'No warning raised for {name}' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( - f'Missing removal version in {name}' - ) - - -# === Property setter deprecations === -@pytest.mark.parametrize( - 
'name,setter', - [ - # InvestParameters setter - ('InvestParameters.optional (setter)', lambda: setattr(fx.InvestParameters(minimum_size=10), 'optional', True)), - # OnOffParameters setters - ( - 'OnOffParameters.on_hours_total_min (setter)', - lambda: setattr(fx.OnOffParameters(), 'on_hours_total_min', 10), - ), - ( - 'OnOffParameters.on_hours_total_max (setter)', - lambda: setattr(fx.OnOffParameters(), 'on_hours_total_max', 20), - ), - ( - 'OnOffParameters.switch_on_total_max (setter)', - lambda: setattr(fx.OnOffParameters(), 'switch_on_total_max', 5), - ), - # Flow setters - ('Flow.flow_hours_total_min (setter)', lambda: setattr(fx.Flow('f', 'bus', 10), 'flow_hours_total_min', 5)), - ('Flow.flow_hours_total_max (setter)', lambda: setattr(fx.Flow('f', 'bus', 10), 'flow_hours_total_max', 20)), - # Effect setters - ('Effect.minimum_operation (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_operation', 100)), - ('Effect.maximum_operation (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_operation', 200)), - ('Effect.minimum_invest (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_invest', 50)), - ('Effect.maximum_invest (setter)', lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_invest', 150)), - ( - 'Effect.minimum_operation_per_hour (setter)', - lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_operation_per_hour', 10), - ), - ( - 'Effect.maximum_operation_per_hour (setter)', - lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_operation_per_hour', 30), - ), - ( - 'Effect.minimum_total_per_period (setter)', - lambda: setattr(fx.Effect('e', '€', 'test'), 'minimum_total_per_period', 100), - ), - ( - 'Effect.maximum_total_per_period (setter)', - lambda: setattr(fx.Effect('e', '€', 'test'), 'maximum_total_per_period', 200), - ), - ], - ids=lambda x: x if isinstance(x, str) else '', -) -def test_property_setter_deprecations(name, setter): - """Test all property setter deprecations include removal version 
message.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - setter() - assert len(w) > 0, f'No warning raised for {name}' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message), ( - f'Missing removal version in {name}' - ) - - -# === FlowSystem-specific deprecations === -def test_flowsystem_all_elements_property(): - """Test FlowSystem.all_elements property deprecation.""" - fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) - bus = fx.Bus('bus') - fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - _ = fs.all_elements - assert len(w) > 0, 'No warning raised for FlowSystem.all_elements' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -def test_flowsystem_weights_getter(): - """Test FlowSystem.weights getter deprecation.""" - fs = fx.FlowSystem( - timesteps=pd.date_range('2020-01-01', periods=10, freq='h'), scenarios=pd.Index(['A', 'B'], name='scenario') - ) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - _ = fs.weights - assert len(w) > 0, 'No warning raised for FlowSystem.weights getter' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -def test_flowsystem_weights_setter(): - """Test FlowSystem.weights setter deprecation.""" - fs = fx.FlowSystem( - timesteps=pd.date_range('2020-01-01', periods=10, freq='h'), - scenarios=pd.Index(['A', 'B', 'C'], name='scenario'), - ) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - fs.weights = np.array([1, 2, 3]) - assert len(w) > 0, 'No warning raised for FlowSystem.weights setter' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -# === Calculation 
deprecations === -def test_calculation_active_timesteps_parameter(): - """Test Calculation active_timesteps parameter deprecation.""" - fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) - bus = fx.Bus('bus') - fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - _ = fx.calculation.Calculation('test', fs, active_timesteps=pd.date_range('2020-01-01', periods=5, freq='h')) - assert len(w) > 0, 'No warning raised for Calculation active_timesteps parameter' - # Check that the active_timesteps deprecation warning is in the list (may not be first due to class-level warning) - messages = [str(warning.message) for warning in w] - assert any( - 'active_timesteps' in msg and f'will be removed in v{DEPRECATION_REMOVAL_VERSION}' in msg - for msg in messages - ) - - -def test_calculation_active_timesteps_property(): - """Test Calculation.active_timesteps property deprecation.""" - fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h')) - bus = fx.Bus('bus') - fs.add_elements(bus, fx.Source('s1', outputs=[fx.Flow('out', 'bus', 10)])) - calc = fx.calculation.Calculation('test', fs) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - _ = calc.active_timesteps - assert len(w) > 0, 'No warning raised for Calculation.active_timesteps property' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -# === Config function deprecations === -def test_change_logging_level_function(): - """Test change_logging_level() function deprecation.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - change_logging_level('INFO') - assert len(w) > 0, 'No warning raised for change_logging_level()' - assert f'will be removed in version {DEPRECATION_REMOVAL_VERSION}' in 
str(w[0].message) - - -# === Results-related deprecations === -@pytest.fixture -def simple_results(): - """Create a simple calculation results object for testing.""" - # Create a minimal flow system - fs = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=5, freq='h')) - bus1 = fx.Bus('bus1') - source = fx.Source('source1', outputs=[fx.Flow('out', 'bus1', size=10, effects_per_flow_hour=1)]) - sink = fx.Sink('sink1', inputs=[fx.Flow('in', 'bus1', size=10)]) - fs.add_elements( - bus1, - fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True), - source, - sink, - ) - - # Create and solve calculation - calc = fx.FullCalculation('test', fs) - calc.do_modeling() - solver = fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=30) - calc.solve(solver) - - return calc.results - - -def test_results_flow_system_parameter(simple_results): - """Test CalculationResults flow_system parameter deprecation.""" - # Get the flow_system_data from existing results - fs_data = simple_results.flow_system_data - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - # Create new results with deprecated parameter - from flixopt.results import CalculationResults - - _ = CalculationResults( - solution=simple_results.solution, - flow_system_data=None, # Will be overridden by deprecated parameter - flow_system=fs_data, # deprecated parameter - name=simple_results.name, - summary=simple_results.summary, - folder=None, - ) - assert len(w) > 0, 'No warning raised for flow_system parameter' - # Check that the flow_system parameter deprecation warning is in the list (may not be first due to class-level warning) - messages = [str(warning.message) for warning in w] - assert any( - 'flow_system' in msg and f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in msg for msg in messages - ) - - -def test_results_plot_node_balance_indexer(simple_results): - """Test ComponentResults.plot_node_balance indexer parameter 
deprecation.""" - # Get actual time values from the results - time_coords = simple_results.solution.coords['time'] - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - simple_results['source1'].plot_node_balance( - indexer={'time': slice(time_coords[0].values, time_coords[2].values)}, show=False, save=False - ) - assert len(w) > 0, 'No warning raised for plot_node_balance indexer parameter' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -def test_plot_heatmap_function_heatmap_params(): - """Test plot_heatmap function heatmap_timeframes/heatmap_timesteps_per_frame parameters.""" - # Create simple test data - 7 days * 24 hours = 168 hours - data = xr.DataArray( - np.random.rand(168), - coords={'time': pd.date_range('2020-01-01', periods=168, freq='h')}, - dims=['time'], - ) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - plot_heatmap( - data, - name='test', - heatmap_timeframes='D', # Days - heatmap_timesteps_per_frame='h', # Hours - show=False, - save=False, - ) - assert len(w) > 0, 'No warning raised for heatmap_timeframes/heatmap_timesteps_per_frame' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -def test_plot_heatmap_function_color_map(): - """Test plot_heatmap function color_map parameter.""" - data = xr.DataArray( - np.random.rand(24), - coords={'time': pd.date_range('2020-01-01', periods=24, freq='h')}, - dims=['time'], - ) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - plot_heatmap(data, name='test', color_map='viridis', show=False, save=False) - assert len(w) > 0, 'No warning raised for color_map parameter' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) - - -def test_plot_heatmap_function_indexer(): - """Test plot_heatmap function indexer parameter.""" - time_index = 
pd.date_range('2020-01-01', periods=24, freq='h') - data = xr.DataArray( - np.random.rand(24), - coords={'time': time_index}, - dims=['time'], - ) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', DeprecationWarning) - # Use actual datetime values for slicing - plot_heatmap(data, name='test', indexer={'time': slice(time_index[0], time_index[10])}, show=False, save=False) - assert len(w) > 0, 'No warning raised for indexer parameter' - assert f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}' in str(w[0].message) From 6fe49c88cf8aa7b1665cbe7babbd4002a6eab4f2 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 02:52:29 +0100 Subject: [PATCH 06/49] ci: improve and split into multiple files --- .github/workflows/release.yaml | 2 +- .github/workflows/{test.yaml => tests.yaml} | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{test.yaml => tests.yaml} (96%) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 8d0d0de1f..4f1edd3e8 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -80,7 +80,7 @@ jobs: name: Run tests needs: [check-preparation] if: needs.check-preparation.outputs.prepared == 'true' - uses: ./.github/workflows/test.yaml + uses: ./.github/workflows/tests.yaml build: name: Build package diff --git a/.github/workflows/test.yaml b/.github/workflows/tests.yaml similarity index 96% rename from .github/workflows/test.yaml rename to .github/workflows/tests.yaml index 395fc766c..5fa245f49 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/tests.yaml @@ -4,9 +4,9 @@ on: push: branches: [main] pull_request: - branches: ["*"] + branches: ["**"] workflow_dispatch: - workflow_call: # Allow release.yaml to call this workflow + workflow_call: # Allow release.yaml to call this workflow. 
concurrency: group: ${{ github.workflow }}-${{ github.ref }} From 30c4a6808a86f5ad2f9ffb517d4b6e674da38576 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 03:28:36 +0100 Subject: [PATCH 07/49] Feature/rename on off to status (#500) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Perfect! Here's the **final complete renaming table with Option A**: ## Parameters Class (`OnOffParameters` → `StatusParameters`) | Current Name | Recommended Name | Rationale | |--------------|------------------|-----------| | `OnOffParameters` | **`StatusParameters`** | Aligns with PyPSA, clearer semantics | | `effects_per_switch_on` | **`effects_per_startup`** | Standard UC terminology | | `effects_per_running_hour` | **`effects_per_active_hour`** | Clear, concise, matches "active" state | | `on_hours_total_min` | **`active_hours_min`** | Total (not consecutive) active hours | | `on_hours_total_max` | **`active_hours_max`** | Total (not consecutive) active hours | | `consecutive_on_hours_min` | **`min_uptime`** | Standard UC term (consecutive) | | `consecutive_on_hours_max` | **`max_uptime`** | Standard UC term (consecutive) | | `consecutive_off_hours_min` | **`min_downtime`** | Standard UC term (consecutive) | | `consecutive_off_hours_max` | **`max_downtime`** | Standard UC term (consecutive) | | `switch_on_total_max` | **`startup_limit`** | Clearer intent, matches "startup" | | `force_switch_on` | **`force_startup_tracking`** | More explicit about what is forced | ## Model Class (`OnOffModel` → `StatusModel`) ### Class Name | Current Name | Recommended Name | |--------------|------------------| | `OnOffModel` | **`StatusModel`** | ### Constructor Parameters | Current Name | Recommended Name | Rationale | |--------------|------------------|-----------| | `on_variable` | **`status`** | Aligns with PyPSA and literature | | `previous_states` | **`previous_status`** | Consistency with 
status variable | ### Variables (short_name in add_variables/expression_tracking_variable) | Current Name | Recommended Name | Type | Notes | |--------------|------------------|------|-------| | `self.on` | **`self.status`** | Input variable | Main binary state variable | | `'off'` | **Remove variable** | Binary variable | Replace with expression `1 - status` | | `'switch\|on'` | **`'startup'`** | Binary variable | Startup event indicator | | `'switch\|off'` | **`'shutdown'`** | Binary variable | Shutdown event indicator | | `'switch\|count'` | **`'startup_count'`** | Integer variable | Number of startups | | `'on_hours_total'` | **`'active_hours'`** | Continuous variable | Total active duration | | `'consecutive_on_hours'` | **`'uptime'`** | Continuous variable | Consecutive active hours | | `'consecutive_off_hours'` | **`'downtime'`** | Continuous variable | Consecutive inactive hours | ### Properties | Current Name | Recommended Name | Returns | Meaning | |--------------|------------------|---------|---------| | `on_hours_total` | **`active_hours`** | `linopy.Variable` | Total active hours | | `off` | **Remove property** | — | Use `1 - status` expression | | `switch_on` | **`startup`** | `linopy.Variable \| None` | Startup events | | `switch_off` | **`shutdown`** | `linopy.Variable \| None` | Shutdown events | | `switch_on_nr` | **`startup_count`** | `linopy.Variable \| None` | Number of startups | | `consecutive_on_hours` | **`uptime`** | `linopy.Variable \| None` | Consecutive active hours | | `consecutive_off_hours` | **`downtime`** | `linopy.Variable \| None` | Consecutive inactive hours | ### Internal Methods | Current Name | Recommended Name | |--------------|------------------| | `_get_previous_on_duration()` | **`_get_previous_uptime()`** | | `_get_previous_off_duration()` | **`_get_previous_downtime()`** | ### Internal Properties/Flags (in parameters) | Current Name | Recommended Name | |--------------|------------------| | `use_off` | **Remove** (use 
expression instead) | | `use_switch_on` | **`use_startup_tracking`** | | `use_consecutive_on_hours` | **`use_uptime_tracking`** | | `use_consecutive_off_hours` | **`use_downtime_tracking`** | ## Constraint Names (short_name in add_constraints) | Current Name | Recommended Name | |--------------|------------------| | `'complementary'` | **Remove** (no off variable) | | `'on_hours_total'` | **`'active_hours'`** | | `'switch\|on'`, `'switch\|off'` | **`'startup'`, `'shutdown'`** | | `'switch\|count'` | **`'startup_count'`** | | `'consecutive_on_hours'` | **`'uptime'`** | | `'consecutive_off_hours'` | **`'downtime'`** | ## Complete Terminology Summary (Option A) **State:** - `status` (binary): 1 = active, 0 = inactive **Events:** - `startup` (binary): transition from inactive to active - `shutdown` (binary): transition from active to inactive **Durations:** - `active_hours` (continuous): **total** hours in active state across time horizon - `uptime` (continuous): **consecutive** hours currently active (UC standard) - `downtime` (continuous): **consecutive** hours currently inactive (UC standard) **Parameter Bounds:** - `active_hours_min/max`: limits on **total** active hours - `min_uptime/max_uptime`: limits on **consecutive** active hours (UC standard) - `min_downtime/max_downtime`: limits on **consecutive** inactive hours (UC standard) - `startup_limit`: maximum number of startup events **Effects:** - `effects_per_startup`: costs/impacts per startup event - `effects_per_active_hour`: costs/impacts per active hour This aligns perfectly with PyPSA and the unit commitment literature! 🎯 * Refactor tests and examples * Refactor tests and examples * Update CHANGELOG.md * Python Docstrings Updated: 1. interface.py - Module docstring now references "Status decisions" 2. 
components.py - Updated all docstrings: - status_parameters parameter descriptions - Example code updated with new parameter names (effects_per_startup, min_uptime, startup_limit) - Fixed incorrect "OnOff feature" docstring to "Investment feature" - Updated TODO comment to reference StatusParameters 3. linear_converters.py - All docstrings updated: - Import statement updated to StatusParameters - All parameter descriptions updated - All example code updated with new terminology 4. flow_system.py - Updated references from "consecutive_on_hours" to "uptime and downtime" and on_off_parameters to status_parameters 5. modeling.py - Updated docstring from "switch-on/off variables" to "state transition constraints for binary switching variables" Documentation Markdown Files Updated: 1. Flow.md - All references updated: - Links to StatusParameters - "on/off state" → "active/inactive state" - Parameter names updated 2. StatusParameters.md (renamed from OnOffParameters.md) - Comprehensive updates: - Title changed to "StatusParameters" - All terminology updated: on/off → active/inactive - Mathematical notation updated: s^on/s^off → s^startup/s^shutdown - Duration variables: d^on/d^off → d^uptime/d^downtime - Parameter names updated in all examples - All Python code examples updated with new API 3. Other modeling pattern docs - Updated all references to StatusParameters and active/inactive terminology 4. mkdocs.yml - Navigation updated to reference StatusParameters.md All docstrings and documentation now consistently use the new Status terminology aligned with PyPSA and unit commitment standards! * Update remaining mentions of old parameters * ⏺ Perfect! I've addressed all the actionable review comments: Changes Made: 1. Fixed error message in modeling.py - Corrected ModelingPrimitives.state_transition_bounds() → BoundingPatterns.state_transition_bounds() in error message (flixopt/modeling.py:591) 2. 
Fixed Transmission type hint (flixopt/components.py:667) - Changed status_parameters: StatusParameters = None → status_parameters: StatusParameters | None = None 3. Fixed absolute_losses=0 edge case (flixopt/components.py:768) - Added np.any(self.element.absolute_losses != 0) check in create_transmission_equation to match the initialization logic - This prevents AttributeError when absolute_losses is explicitly set to 0 4. Updated test assertion messages (tests/test_component.py) - Changed "On does not work properly" → "Status does not work properly" 5. Fixed effects_per_startup type (examples/02_Complex/complex_example.py) - Changed scalar effects_per_startup=0.01 → dict effects_per_startup={Costs.label: 0.01} in all 3 occurrences - Now consistent with the StatusParameters API which expects a dict mapping effect names to values 6. Updated test_functional.py docstring - Removed reference to non-existent TestStatus class - Updated to accurately describe the status-related test functions 7. Consistent unbounded upper bounds (flixopt/features.py:191) - Changed np.inf → None for unbounded active_hours_max - Now consistent with FlowModel's total_flow_hours pattern All changes maintain backward compatibility and align with the codebase's existing patterns. The documentation in index.md was already correct (BoundingPatterns is the right class for state_transition_bounds). * Changes Made: 1. CHANGELOG.md - Fixed parameter rename documentation (lines 89-90) - Changed incorrect status_parameters → status_parameters - To correct: on_off_parameters → status_parameters 2. CHANGELOG.md - Removed duplicate logger warning (line 803 in v2.1.0) - Removed duplicate entry that was already documented in v2.0.1 - Fixed v2.0.1 entry to say on_off_parameters (the name at that time) 3. 
StatusParameters.md - Aligned flow bounds formulation (line 229) - Updated summary to include max(ε, rel_lower) like the main text - Now consistent: s(t) · P · max(ε, rel_lower) ≤ p(t) ≤ s(t) · P · rel_upper 4. features.py - Narrowed previous_status type hint (line 155) - Changed from Numeric_TPS | None to xr.DataArray | None - Added import xarray as xr (line 12) - This accurately reflects that _get_previous_uptime() and _get_previous_downtime() use xarray APIs All changes are verified to compile correctly and maintain consistency with the codebase patterns! * Fixed Issues 1. Constraint naming in tests (tests/test_component.py:126-127, 158, 168, 338, 348): - Updated test expectations from 'TestComponent|on|lb' and 'TestComponent|on|ub' to 'TestComponent|status|lb' and 'TestComponent|status|ub' to match the actual constraint names 2. Added 'off' property to StatusModel (flixopt/features.py:284-287): - Added a new property that returns 1 - self.status for backward compatibility with tests expecting an off attribute 3. Fixed deprecated parameter name (tests/test_functional.py:435): - Changed force_switch_on=True to force_startup_tracking=True in StatusParameters 4. 
Fixed property name (tests/test_functional.py:466): - Changed switch_off to shutdown to match the actual property name in StatusModel * Delete mistakenly added files * Delete mistakenly added files * Final touches * Final touches * Replace off with inactive * Rename low level parameters as well: switch_on -> activate switch_off -> deactivate * Rename low level parameters as well: switch_on -> activate switch_off -> deactivate state_variable -> state * Rename low level parameters as well: switch_on -> activate switch_off -> deactivate state_variable -> state * Docstring Improvements Summary ✅ All Parameters Now Documented Each primitive now has complete parameter documentation with: - Clear description of what each parameter does - Type expectations - Default values where applicable ✅ Focused on Math & Parameters Removed: - Excessive examples at low level - Use case lists that belong at higher levels Enhanced: - Mathematical formulations (using proper · symbol for multiplication) - Clear behavior descriptions - Precise return value documentation Updated Functions: ModelingPrimitives: 1. expression_tracking_variable - All 6 parameters documented - Clear math formulation 2. consecutive_duration_tracking - All 9 parameters documented - Explained Big-M value - Clear what constraints are returned 3. mutual_exclusivity_constraint - All 4 parameters documented - Simplified, focused on math BoundingPatterns: 4. basic_bounds - All 4 parameters documented - Concise formulation 5. bounds_with_state - All 5 parameters documented - Explained epsilon (ε) usage 6. scaled_bounds - All 5 parameters documented - Clear scaling relationship 7. scaled_bounds_with_state - All 7 parameters documented - Explained Big-M formulation 8. state_transition_bounds - All 7 parameters documented - Removed verbose examples, kept math focus 9. 
continuous_transition_bounds - All 8 parameters documented - Clear Big-M constraint explanation Result ✅ All parameters documented ✅ Math-focused docstrings ✅ Consistent format across all primitives ✅ Tests still passing The modeling primitives now have professional, complete documentation! * Update docs * Add missing type hints * Fix bullet points * Fix bullet points * Re-apply changes from main * Bugfix: Usage of old on_off_parameters * Update CHANGELOG.md * Update CHANGELOG.md * Update CHANGELOG.md * Fix typos * Improve flagging of whether to create inactive variable * Improve default upper bound of active_hours * Bugfix self._model.hours_per_step.sum('time').item() with scenarios/periods * Fix test * Fix names * Update the test assertions to expect upper=total_hours instead of upper=inf when active_hours_max is not specified * Empty * Trigger CI * Fix test * Trigger CI * Summary of Fixes 1. Return type annotation for consecutive_duration_tracking (flixopt/modeling.py:255): - Changed from tuple[linopy.Variable, tuple[linopy.Constraint, linopy.Constraint, linopy.Constraint]] - To tuple[dict[str, linopy.Variable], dict[str, linopy.Constraint]] to match the actual return value 2. Clarified inactive property docstring (flixopt/features.py:284-291): - Replaced the confusing "deprecated" note with a clear explanation that: - The variable is only created when downtime tracking is enabled - Users should prefer 1 - status expression for general use 3. Fixed _get_previous_uptime docstring (flixopt/features.py:318-322): - Clarified that it returns 0 when no previous status is provided (assumes previously inactive) 4. Fixed _get_previous_downtime docstring (flixopt/features.py:329-333): - Clarified that it returns one timestep duration when no previous status is provided (assumes previously inactive) 5. 
No action needed for effects_per_startup without use_startup_tracking: - Verified that use_startup_tracking already returns True when effects_per_startup has values (line 1258 in interface.py), so this is already handled correctly 6. Test fixes (tests/test_flow.py): - Updated three test assertions to use model.hours_per_step.sum('time') as the expected upper bound for active_hours when active_hours_max is not specified * Trigger CI --- CHANGELOG.md | 68 ++- docs/user-guide/core-concepts.md | 2 +- .../mathematical-notation/dimensions.md | 2 +- .../effects-penalty-objective.md | 4 + .../mathematical-notation/elements/Flow.md | 11 +- .../mathematical-notation/elements/Storage.md | 2 + .../features/InvestParameters.md | 8 + .../features/OnOffParameters.md | 307 ------------- .../features/StatusParameters.md | 317 +++++++++++++ .../user-guide/mathematical-notation/index.md | 8 +- .../modeling-patterns/bounds-and-states.md | 16 +- .../modeling-patterns/duration-tracking.md | 13 +- .../modeling-patterns/index.md | 4 +- .../modeling-patterns/state-transitions.md | 26 +- docs/user-guide/recipes/index.md | 2 +- examples/02_Complex/complex_example.py | 26 +- .../02_Complex/complex_example_results.py | 4 +- .../example_optimization_modes.py | 4 +- examples/04_Scenarios/scenario_example.py | 4 +- .../two_stage_optimization.py | 6 +- flixopt/__init__.py | 4 +- flixopt/components.py | 46 +- flixopt/elements.py | 201 +++++---- flixopt/features.py | 181 ++++---- flixopt/flow_system.py | 4 +- flixopt/interface.py | 233 +++++----- flixopt/linear_converters.py | 76 ++-- flixopt/modeling.py | 304 +++++++------ mkdocs.yml | 2 +- tests/conftest.py | 32 +- tests/test_component.py | 174 +++---- tests/test_flow.py | 426 +++++++++--------- tests/test_functional.py | 60 +-- tests/test_linear_converter.py | 68 +-- tests/test_scenarios.py | 20 +- tests/test_storage.py | 7 +- 36 files changed, 1412 insertions(+), 1260 deletions(-) create mode 100644 
docs/user-guide/mathematical-notation/features/StatusParameters.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 7544375ff..3bc226e40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -51,14 +51,76 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ## [Unreleased] - ????-??-?? -**Summary**: - -If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/). +**Summary**: Renamed OnOff terminology to Status terminology for better alignment with PyPSA and unit commitment standards. ### ✨ Added ### 💥 Breaking Changes +**Renamed `OnOffParameters` → `StatusParameters`**: Complete terminology update to align with industry standards (PyPSA, unit commitment). This is a clean breaking change with no backwards compatibility wrapper. + +**Class and Constructor Parameters:** + +| Category | Old Name (OnOffParameters) | New Name (StatusParameters) | Notes | +|----------|---------------------------|----------------------------|-------| +| **Class** | `OnOffParameters` | `StatusParameters` | Main class renamed | +| **Constructor** | `on_variable` | `status` | Model variable parameter | +| **Constructor** | `previous_states` | `previous_status` | Initial state parameter | +| **Parameter** | `effects_per_switch_on` | `effects_per_startup` | Startup costs/impacts | +| **Parameter** | `effects_per_running_hour` | `effects_per_active_hour` | Operating costs/impacts | +| **Parameter** | `on_hours_total_min` | `active_hours_min` | Minimum total operating hours | +| **Parameter** | `on_hours_total_max` | `active_hours_max` | Maximum total operating hours | +| **Parameter** | `consecutive_on_hours_min` | `min_uptime` | UC standard terminology | +| **Parameter** | `consecutive_on_hours_max` | `max_uptime` | UC standard terminology | +| **Parameter** | `consecutive_off_hours_min` | `min_downtime` | UC standard 
terminology | +| **Parameter** | `consecutive_off_hours_max` | `max_downtime` | UC standard terminology | +| **Parameter** | `switch_on_total_max` | `startup_limit` | Maximum number of startups | +| **Parameter** | `force_switch_on` | `force_startup_tracking` | Force creation of startup variables | + +**Model Classes and Variables:** + +| Category | Old Name (OnOffModel) | New Name (StatusModel) | Notes | +|----------|----------------------|------------------------|-------| +| **Model Class** | `OnOffModel` | `StatusModel` | Feature model class | +| **Variable** | `on` | `status` | Main binary state variable | +| **Variable** | `switch_on` | `startup` | Startup event variable | +| **Variable** | `switch_off` | `shutdown` | Shutdown event variable | +| **Variable** | `switch_on_nr` | `startup_count` | Cumulative startup counter | +| **Variable** | `on_hours_total` | `active_hours` | Total operating hours | +| **Variable** | `consecutive_on_hours` | `uptime` | Consecutive active hours | +| **Variable** | `consecutive_off_hours` | `downtime` | Consecutive inactive hours | +| **Variable** | `off` | `inactive` | Deprecated - use `1 - status` instead | + +**Flow and Component API:** + +| Category | Old Name | New Name | Location | +|----------|----------|----------|----------| +| **Parameter** | `on_off_parameters` | `status_parameters` | `Flow.__init__()` | +| **Parameter** | `on_off_parameters` | `status_parameters` | `Component.__init__()` | +| **Property** | `flow.submodel.on_off` | `flow.submodel.status` | Flow submodel access | +| **Property** | `component.submodel.on_off` | `component.submodel.status` | Component submodel access | + +**Internal Properties:** + +| Old Name | New Name | +|----------|----------| +| `use_switch_on` | `use_startup_tracking` | +| `use_consecutive_on_hours` | `use_uptime_tracking` | +| `use_consecutive_off_hours` | `use_downtime_tracking` | +| `with_on_off` | `with_status` | +| `previous_states` | `previous_status` | + +**Migration 
Guide**: + +Use find-and-replace to update your code with the mappings above. The functionality is identical - only naming has changed. + +**Important**: This is a complete renaming with no backwards compatibility. The change affects: +- Constructor parameter names +- Model variable names and property access +- Results access patterns + +A partial backwards compatibility wrapper would be misleading, so we opted for a clean breaking change. + ### ♻️ Changed ### 🗑️ Deprecated diff --git a/docs/user-guide/core-concepts.md b/docs/user-guide/core-concepts.md index f165f1e4e..d63f10f27 100644 --- a/docs/user-guide/core-concepts.md +++ b/docs/user-guide/core-concepts.md @@ -28,7 +28,7 @@ Element labels must be unique across all types. See the [`FlowSystem` API refere - Have a `size` which, generally speaking, defines how much energy or material can be moved. Usually measured in MW, kW, m³/h, etc. - Have a `flow_rate`, which defines how fast energy or material is transported. Usually measured in MW, kW, m³/h, etc. -- Have constraints to limit the flow-rate (min/max, total flow hours, on/off etc.) +- Have constraints to limit the flow-rate (min/max, total flow hours, active/inactive status etc.) - Can have fixed profiles (for demands or renewable generation) - Can have [Effects](#effects) associated by their use (costs, emissions, labour, ...) 
diff --git a/docs/user-guide/mathematical-notation/dimensions.md b/docs/user-guide/mathematical-notation/dimensions.md index e10ef5ffd..2a526e19d 100644 --- a/docs/user-guide/mathematical-notation/dimensions.md +++ b/docs/user-guide/mathematical-notation/dimensions.md @@ -102,7 +102,7 @@ Scenarios within a period are **operationally independent**: - Each scenario has its own operational variables: $p(\text{t}_i, s_1)$ and $p(\text{t}_i, s_2)$ are independent - Scenarios cannot exchange energy, information, or resources - Storage states are separate: $c(\text{t}_i, s_1) \neq c(\text{t}_i, s_2)$ -- Binary states (on/off) are independent: $s(\text{t}_i, s_1)$ vs $s(\text{t}_i, s_2)$ +- Binary states (active/inactive) are independent: $s(\text{t}_i, s_1)$ vs $s(\text{t}_i, s_2)$ Scenarios are connected **only through the objective function** via weights: diff --git a/docs/user-guide/mathematical-notation/effects-penalty-objective.md b/docs/user-guide/mathematical-notation/effects-penalty-objective.md index 1c96f3613..aeab09031 100644 --- a/docs/user-guide/mathematical-notation/effects-penalty-objective.md +++ b/docs/user-guide/mathematical-notation/effects-penalty-objective.md @@ -7,6 +7,7 @@ **Example:** [`Flows`][flixopt.elements.Flow] have an attribute `effects_per_flow_hour` that defines the effect contribution per flow-hour: + - Costs (€/kWh) - Emissions (kg CO₂/kWh) - Primary energy consumption (kWh_primary/kWh) @@ -260,6 +261,7 @@ $$ $$ Where: + - $\mathcal{S}$ is the set of scenarios - $w_s$ is the weight for scenario $s$ (typically scenario probability) - Periodic effects are **shared across scenarios**: $E_{\Omega,\text{per}}$ and $E_{\Phi,\text{per}}$ (same for all $s$) @@ -280,6 +282,7 @@ $$ $$ Where: + - $\mathcal{Y}$ is the set of periods (e.g., years) - $w_y$ is the weight for period $y$ (typically annual discount factor) - Each period $y$ has **independent** periodic and temporal effects (including penalty) @@ -295,6 +298,7 @@ $$ $$ Where: + - 
$\mathcal{S}$ is the set of scenarios - $\mathcal{Y}$ is the set of periods - $w_y$ is the period weight (for periodic effects) diff --git a/docs/user-guide/mathematical-notation/elements/Flow.md b/docs/user-guide/mathematical-notation/elements/Flow.md index 5914ba911..2cc2e3b6a 100644 --- a/docs/user-guide/mathematical-notation/elements/Flow.md +++ b/docs/user-guide/mathematical-notation/elements/Flow.md @@ -23,8 +23,8 @@ $$ $$ -This mathematical formulation can be extended by using [OnOffParameters](../features/OnOffParameters.md) -to define the on/off state of the Flow, or by using [InvestParameters](../features/InvestParameters.md) +This mathematical formulation can be extended by using [StatusParameters](../features/StatusParameters.md) +to define the active/inactive state of the Flow, or by using [InvestParameters](../features/InvestParameters.md) to change the size of the Flow from a constant to an optimization variable. --- @@ -34,7 +34,7 @@ to change the size of the Flow from a constant to an optimization variable. 
Flow formulation uses the following modeling patterns: - **[Scaled Bounds](../modeling-patterns/bounds-and-states.md#scaled-bounds)** - Basic flow rate bounds (equation $\eqref{eq:flow_rate}$) -- **[Scaled Bounds with State](../modeling-patterns/bounds-and-states.md#scaled-bounds-with-state)** - When combined with [OnOffParameters](../features/OnOffParameters.md) +- **[Scaled Bounds with State](../modeling-patterns/bounds-and-states.md#scaled-bounds-with-state)** - When combined with [StatusParameters](../features/StatusParameters.md) - **[Bounds with State](../modeling-patterns/bounds-and-states.md#bounds-with-state)** - Investment decisions with [InvestParameters](../features/InvestParameters.md) --- @@ -44,11 +44,12 @@ Flow formulation uses the following modeling patterns: **Python Class:** [`Flow`][flixopt.elements.Flow] **Key Parameters:** + - `size`: Flow size $\text{P}$ (can be fixed or variable with InvestParameters) - `relative_minimum`, `relative_maximum`: Relative bounds $\text{p}^{\text{L}}_{\text{rel}}, \text{p}^{\text{U}}_{\text{rel}}$ - `effects_per_flow_hour`: Operational effects (costs, emissions, etc.) - `invest_parameters`: Optional investment modeling (see [InvestParameters](../features/InvestParameters.md)) -- `on_off_parameters`: Optional on/off operation (see [OnOffParameters](../features/OnOffParameters.md)) +- `status_parameters`: Optional active/inactive operation (see [StatusParameters](../features/StatusParameters.md)) See the [`Flow`][flixopt.elements.Flow] API documentation for complete parameter list and usage examples. 
@@ -56,7 +57,7 @@ See the [`Flow`][flixopt.elements.Flow] API documentation for complete parameter ## See Also -- [OnOffParameters](../features/OnOffParameters.md) - Binary on/off operation +- [StatusParameters](../features/StatusParameters.md) - Binary active/inactive operation - [InvestParameters](../features/InvestParameters.md) - Variable flow sizing - [Bus](../elements/Bus.md) - Flow balance constraints - [LinearConverter](../elements/LinearConverter.md) - Flow ratio constraints diff --git a/docs/user-guide/mathematical-notation/elements/Storage.md b/docs/user-guide/mathematical-notation/elements/Storage.md index cd7046592..9ecd4d570 100644 --- a/docs/user-guide/mathematical-notation/elements/Storage.md +++ b/docs/user-guide/mathematical-notation/elements/Storage.md @@ -53,6 +53,7 @@ Storage formulation uses the following modeling patterns: - **[Scaled Bounds](../modeling-patterns/bounds-and-states.md#scaled-bounds)** - For flow rate bounds relative to storage size When combined with investment parameters, storage can use: + - **[Bounds with State](../modeling-patterns/bounds-and-states.md#bounds-with-state)** - Investment decisions (see [InvestParameters](../features/InvestParameters.md)) --- @@ -62,6 +63,7 @@ When combined with investment parameters, storage can use: **Python Class:** [`Storage`][flixopt.components.Storage] **Key Parameters:** + - `capacity_in_flow_hours`: Storage capacity $\text{C}$ - `relative_loss_per_hour`: Self-discharge rate $\dot{\text{c}}_\text{rel,loss}$ - `initial_charge_state`: Initial charge $c(\text{t}_0)$ diff --git a/docs/user-guide/mathematical-notation/features/InvestParameters.md b/docs/user-guide/mathematical-notation/features/InvestParameters.md index 14fe02c79..d998039c0 100644 --- a/docs/user-guide/mathematical-notation/features/InvestParameters.md +++ b/docs/user-guide/mathematical-notation/features/InvestParameters.md @@ -15,6 +15,7 @@ v_\text{invest} = s_\text{invest} \cdot \text{size}_\text{fixed} $$ With: + - 
$v_\text{invest}$ being the resulting investment size - $s_\text{invest} \in \{0, 1\}$ being the binary investment decision - $\text{size}_\text{fixed}$ being the predefined component size @@ -34,6 +35,7 @@ s_\text{invest} \cdot \text{size}_\text{min} \leq v_\text{invest} \leq s_\text{i $$ With: + - $v_\text{invest}$ being the investment size variable (continuous) - $s_\text{invest} \in \{0, 1\}$ being the binary investment decision - $\text{size}_\text{min}$ being the minimum investment size (if investing) @@ -80,6 +82,7 @@ E_{e,\text{fix}} = s_\text{invest} \cdot \text{fix}_e $$ With: + - $E_{e,\text{fix}}$ being the fixed contribution to effect $e$ - $\text{fix}_e$ being the fixed effect value (e.g., fixed installation cost) @@ -99,6 +102,7 @@ E_{e,\text{spec}} = v_\text{invest} \cdot \text{spec}_e $$ With: + - $E_{e,\text{spec}}$ being the size-dependent contribution to effect $e$ - $\text{spec}_e$ being the specific effect value per unit size (e.g., €/kW) @@ -123,6 +127,7 @@ v_\text{invest} = \sum_{k=1}^{K} \lambda_k \cdot v_k $$ With: + - $E_{e,\text{pw}}$ being the piecewise contribution to effect $e$ - $\lambda_k$ being the piecewise lambda variables (see [Piecewise](../features/Piecewise.md)) - $r_{e,k}$ being the effect rate at piece $k$ @@ -146,6 +151,7 @@ E_{e,\text{retirement}} = (1 - s_\text{invest}) \cdot \text{retirement}_e $$ With: + - $E_{e,\text{retirement}}$ being the retirement contribution to effect $e$ - $\text{retirement}_e$ being the retirement effect value @@ -210,6 +216,7 @@ $$\label{eq:annualization} $$ With: + - $\text{cost}_\text{capital}$ being the upfront investment cost - $r$ being the discount rate - $n$ being the equipment lifetime in years @@ -226,6 +233,7 @@ $$ **Python Class:** [`InvestParameters`][flixopt.interface.InvestParameters] **Key Parameters:** + - `fixed_size`: For binary investments (mutually exclusive with continuous sizing) - `minimum_size`, `maximum_size`: For continuous sizing - `mandatory`: Whether investment is 
required (default: `False`) diff --git a/docs/user-guide/mathematical-notation/features/OnOffParameters.md b/docs/user-guide/mathematical-notation/features/OnOffParameters.md index 6bf40fec9..e69de29bb 100644 --- a/docs/user-guide/mathematical-notation/features/OnOffParameters.md +++ b/docs/user-guide/mathematical-notation/features/OnOffParameters.md @@ -1,307 +0,0 @@ -# OnOffParameters - -[`OnOffParameters`][flixopt.interface.OnOffParameters] model equipment that operates in discrete on/off states rather than continuous operation. This captures realistic operational constraints including startup costs, minimum run times, cycling limitations, and maintenance scheduling. - -## Binary State Variable - -Equipment operation is modeled using a binary state variable: - -$$\label{eq:onoff_state} -s(t) \in \{0, 1\} \quad \forall t -$$ - -With: -- $s(t) = 1$: equipment is operating (on state) -- $s(t) = 0$: equipment is shutdown (off state) - -This state variable controls the equipment's operational constraints and modifies flow bounds using the **bounds with state** pattern from [Bounds and States](../modeling-patterns/bounds-and-states.md#bounds-with-state). 
- ---- - -## State Transitions and Switching - -State transitions are tracked using switch variables (see [State Transitions](../modeling-patterns/state-transitions.md#binary-state-transitions)): - -$$\label{eq:onoff_transitions} -s^\text{on}(t) - s^\text{off}(t) = s(t) - s(t-1) \quad \forall t > 0 -$$ - -$$\label{eq:onoff_switch_exclusivity} -s^\text{on}(t) + s^\text{off}(t) \leq 1 \quad \forall t -$$ - -With: -- $s^\text{on}(t) \in \{0, 1\}$: equals 1 when switching from off to on (startup) -- $s^\text{off}(t) \in \{0, 1\}$: equals 1 when switching from on to off (shutdown) - -**Behavior:** -- Off → On: $s^\text{on}(t) = 1, s^\text{off}(t) = 0$ -- On → Off: $s^\text{on}(t) = 0, s^\text{off}(t) = 1$ -- No change: $s^\text{on}(t) = 0, s^\text{off}(t) = 0$ - ---- - -## Effects and Costs - -### Switching Effects - -Effects incurred when equipment starts up: - -$$\label{eq:onoff_switch_effects} -E_{e,\text{switch}} = \sum_{t} s^\text{on}(t) \cdot \text{effect}_{e,\text{switch}} -$$ - -With: -- $\text{effect}_{e,\text{switch}}$ being the effect value per startup event - -**Examples:** -- Startup fuel consumption -- Wear and tear costs -- Labor costs for startup procedures -- Inrush power demands - ---- - -### Running Effects - -Effects incurred while equipment is operating: - -$$\label{eq:onoff_running_effects} -E_{e,\text{run}} = \sum_{t} s(t) \cdot \Delta t \cdot \text{effect}_{e,\text{run}} -$$ - -With: -- $\text{effect}_{e,\text{run}}$ being the effect rate per operating hour -- $\Delta t$ being the time step duration - -**Examples:** -- Fixed operating and maintenance costs -- Auxiliary power consumption -- Consumable materials -- Emissions while running - ---- - -## Operating Hour Constraints - -### Total Operating Hours - -Bounds on total operating time across the planning horizon: - -$$\label{eq:onoff_total_hours} -h_\text{min} \leq \sum_{t} s(t) \cdot \Delta t \leq h_\text{max} -$$ - -With: -- $h_\text{min}$ being the minimum total operating hours -- 
$h_\text{max}$ being the maximum total operating hours - -**Use cases:** -- Minimum runtime requirements (contracts, maintenance) -- Maximum runtime limits (fuel availability, permits, equipment life) - ---- - -### Consecutive Operating Hours - -**Minimum Consecutive On-Time:** - -Enforces minimum runtime once started using duration tracking (see [Duration Tracking](../modeling-patterns/duration-tracking.md#minimum-duration-constraints)): - -$$\label{eq:onoff_min_on_duration} -d^\text{on}(t) \geq (s(t-1) - s(t)) \cdot h^\text{on}_\text{min} \quad \forall t > 0 -$$ - -With: -- $d^\text{on}(t)$ being the consecutive on-time duration at time $t$ -- $h^\text{on}_\text{min}$ being the minimum required on-time - -**Behavior:** -- When shutting down at time $t$: enforces equipment was on for at least $h^\text{on}_\text{min}$ prior to the switch -- Prevents short cycling and frequent startups - -**Maximum Consecutive On-Time:** - -Limits continuous operation before requiring shutdown: - -$$\label{eq:onoff_max_on_duration} -d^\text{on}(t) \leq h^\text{on}_\text{max} \quad \forall t -$$ - -**Use cases:** -- Mandatory maintenance intervals -- Process batch time limits -- Thermal cycling requirements - ---- - -### Consecutive Shutdown Hours - -**Minimum Consecutive Off-Time:** - -Enforces minimum shutdown duration before restarting: - -$$\label{eq:onoff_min_off_duration} -d^\text{off}(t) \geq (s(t) - s(t-1)) \cdot h^\text{off}_\text{min} \quad \forall t > 0 -$$ - -With: -- $d^\text{off}(t)$ being the consecutive off-time duration at time $t$ -- $h^\text{off}_\text{min}$ being the minimum required off-time - -**Use cases:** -- Cooling periods -- Maintenance requirements -- Process stabilization - -**Maximum Consecutive Off-Time:** - -Limits shutdown duration before mandatory restart: - -$$\label{eq:onoff_max_off_duration} -d^\text{off}(t) \leq h^\text{off}_\text{max} \quad \forall t -$$ - -**Use cases:** -- Equipment preservation requirements -- Process stability needs -- 
Contractual minimum activity levels - ---- - -## Cycling Limits - -Maximum number of startups across the planning horizon: - -$$\label{eq:onoff_max_switches} -\sum_{t} s^\text{on}(t) \leq n_\text{max} -$$ - -With: -- $n_\text{max}$ being the maximum allowed number of startups - -**Use cases:** -- Preventing excessive equipment wear -- Grid stability requirements -- Operational complexity limits -- Maintenance budget constraints - ---- - -## Integration with Flow Bounds - -OnOffParameters modify flow rate bounds by coupling them to the on/off state. - -**Without OnOffParameters** (continuous operation): -$$ -P \cdot \text{rel}_\text{lower} \leq p(t) \leq P \cdot \text{rel}_\text{upper} -$$ - -**With OnOffParameters** (binary operation): -$$ -s(t) \cdot P \cdot \max(\varepsilon, \text{rel}_\text{lower}) \leq p(t) \leq s(t) \cdot P \cdot \text{rel}_\text{upper} -$$ - -Using the **bounds with state** pattern from [Bounds and States](../modeling-patterns/bounds-and-states.md#bounds-with-state). - -**Behavior:** -- When $s(t) = 0$: flow is forced to zero -- When $s(t) = 1$: flow follows normal bounds - ---- - -## Complete Formulation Summary - -For equipment with OnOffParameters, the complete constraint system includes: - -1. **State variable:** $s(t) \in \{0, 1\}$ -2. **Switch tracking:** $s^\text{on}(t) - s^\text{off}(t) = s(t) - s(t-1)$ -3. **Switch exclusivity:** $s^\text{on}(t) + s^\text{off}(t) \leq 1$ -4. **Duration tracking:** - - On-duration: $d^\text{on}(t)$ following duration tracking pattern - - Off-duration: $d^\text{off}(t)$ following duration tracking pattern -5. **Minimum on-time:** $d^\text{on}(t) \geq (s(t-1) - s(t)) \cdot h^\text{on}_\text{min}$ -6. **Maximum on-time:** $d^\text{on}(t) \leq h^\text{on}_\text{max}$ -7. **Minimum off-time:** $d^\text{off}(t) \geq (s(t) - s(t-1)) \cdot h^\text{off}_\text{min}$ -8. **Maximum off-time:** $d^\text{off}(t) \leq h^\text{off}_\text{max}$ -9. 
**Total hours:** $h_\text{min} \leq \sum_t s(t) \cdot \Delta t \leq h_\text{max}$ -10. **Cycling limit:** $\sum_t s^\text{on}(t) \leq n_\text{max}$ -11. **Flow bounds:** $s(t) \cdot P \cdot \text{rel}_\text{lower} \leq p(t) \leq s(t) \cdot P \cdot \text{rel}_\text{upper}$ - ---- - -## Implementation - -**Python Class:** [`OnOffParameters`][flixopt.interface.OnOffParameters] - -**Key Parameters:** -- `effects_per_switch_on`: Costs per startup event -- `effects_per_running_hour`: Costs per hour of operation -- `on_hours_min`, `on_hours_max`: Total runtime bounds -- `consecutive_on_hours_min`, `consecutive_on_hours_max`: Consecutive runtime bounds -- `consecutive_off_hours_min`, `consecutive_off_hours_max`: Consecutive shutdown bounds -- `switch_on_max`: Maximum number of startups -- `force_switch_on`: Create switch variables even without limits (for tracking) - -See the [`OnOffParameters`][flixopt.interface.OnOffParameters] API documentation for complete parameter list and usage examples. 
- -**Mathematical Patterns Used:** -- [State Transitions](../modeling-patterns/state-transitions.md#binary-state-transitions) - Switch tracking -- [Duration Tracking](../modeling-patterns/duration-tracking.md) - Consecutive time constraints -- [Bounds with State](../modeling-patterns/bounds-and-states.md#bounds-with-state) - Flow control - -**Used in:** -- [`Flow`][flixopt.elements.Flow] - On/off operation for flows -- All components supporting discrete operational states - ---- - -## Examples - -### Power Plant with Startup Costs -```python -power_plant = OnOffParameters( - effects_per_switch_on={'startup_cost': 25000}, # €25k per startup - effects_per_running_hour={'fixed_om': 125}, # €125/hour while running - consecutive_on_hours_min=8, # Minimum 8-hour run - consecutive_off_hours_min=4, # 4-hour cooling period - on_hours_max=6000, # Annual limit -) -``` - -### Batch Process with Cycling Limits -```python -batch_reactor = OnOffParameters( - effects_per_switch_on={'setup_cost': 1500}, - consecutive_on_hours_min=12, # 12-hour minimum batch - consecutive_on_hours_max=24, # 24-hour maximum batch - consecutive_off_hours_min=6, # Cleaning time - switch_on_max=200, # Max 200 batches -) -``` - -### HVAC with Cycle Prevention -```python -hvac = OnOffParameters( - effects_per_switch_on={'compressor_wear': 0.5}, - consecutive_on_hours_min=1, # Prevent short cycling - consecutive_off_hours_min=0.5, # 30-min minimum off - switch_on_max=2000, # Limit compressor starts -) -``` - -### Backup Generator with Testing Requirements -```python -backup_gen = OnOffParameters( - effects_per_switch_on={'fuel_priming': 50}, # L diesel - consecutive_on_hours_min=0.5, # 30-min test duration - consecutive_off_hours_max=720, # Test every 30 days - on_hours_min=26, # Weekly testing requirement -) -``` - ---- - -## Notes - -**Time Series Boundary:** The final time period constraints for consecutive_on_hours_min/max and consecutive_off_hours_min/max are not enforced at the end of the planning 
horizon. This allows optimization to end with ongoing campaigns that may be shorter/longer than specified, as they extend beyond the modeled period. diff --git a/docs/user-guide/mathematical-notation/features/StatusParameters.md b/docs/user-guide/mathematical-notation/features/StatusParameters.md new file mode 100644 index 000000000..2ec34e3df --- /dev/null +++ b/docs/user-guide/mathematical-notation/features/StatusParameters.md @@ -0,0 +1,317 @@ +# StatusParameters + +[`StatusParameters`][flixopt.interface.StatusParameters] model equipment that operates in discrete active/inactive states rather than continuous operation. This captures realistic operational constraints including startup costs, minimum run times, cycling limitations, and maintenance scheduling. + +## Binary State Variable + +Equipment operation is modeled using a binary state variable: + +$$\label{eq:status_state} +s(t) \in \{0, 1\} \quad \forall t +$$ + +With: + +- $s(t) = 1$: equipment is operating (active state) +- $s(t) = 0$: equipment is shutdown (inactive state) + +This state variable controls the equipment's operational constraints and modifies flow bounds using the **bounds with state** pattern from [Bounds and States](../modeling-patterns/bounds-and-states.md#bounds-with-state). 
+ +--- + +## State Transitions and Switching + +State transitions are tracked using switch variables (see [State Transitions](../modeling-patterns/state-transitions.md#binary-state-transitions)): + +$$\label{eq:status_transitions} +s^\text{startup}(t) - s^\text{shutdown}(t) = s(t) - s(t-1) \quad \forall t > 0 +$$ + +$$\label{eq:status_switch_exclusivity} +s^\text{startup}(t) + s^\text{shutdown}(t) \leq 1 \quad \forall t +$$ + +With: + +- $s^\text{startup}(t) \in \{0, 1\}$: equals 1 when switching from inactive to active (startup) +- $s^\text{shutdown}(t) \in \{0, 1\}$: equals 1 when switching from active to inactive (shutdown) + +**Behavior:** +- Inactive → Active: $s^\text{startup}(t) = 1, s^\text{shutdown}(t) = 0$ +- Active → Inactive: $s^\text{startup}(t) = 0, s^\text{shutdown}(t) = 1$ +- No change: $s^\text{startup}(t) = 0, s^\text{shutdown}(t) = 0$ + +--- + +## Effects and Costs + +### Startup Effects + +Effects incurred when equipment starts up: + +$$\label{eq:status_switch_effects} +E_{e,\text{switch}} = \sum_{t} s^\text{startup}(t) \cdot \text{effect}_{e,\text{switch}} +$$ + +With: + +- $\text{effect}_{e,\text{switch}}$ being the effect value per startup event + +**Examples:** +- Startup fuel consumption +- Wear and tear costs +- Labor costs for startup procedures +- Inrush power demands + +--- + +### Running Effects + +Effects incurred while equipment is operating: + +$$\label{eq:status_running_effects} +E_{e,\text{run}} = \sum_{t} s(t) \cdot \Delta t \cdot \text{effect}_{e,\text{run}} +$$ + +With: + +- $\text{effect}_{e,\text{run}}$ being the effect rate per operating hour +- $\Delta t$ being the time step duration + +**Examples:** +- Fixed operating and maintenance costs +- Auxiliary power consumption +- Consumable materials +- Emissions while running + +--- + +## Operating Hour Constraints + +### Total Operating Hours + +Bounds on total operating time across the planning horizon: + +$$\label{eq:status_total_hours} +h_\text{min} \leq \sum_{t} s(t) \cdot 
\Delta t \leq h_\text{max} +$$ + +With: + +- $h_\text{min}$ being the minimum total operating hours +- $h_\text{max}$ being the maximum total operating hours + +**Use cases:** +- Minimum runtime requirements (contracts, maintenance) +- Maximum runtime limits (fuel availability, permits, equipment life) + +--- + +### Consecutive Operating Hours + +**Minimum Consecutive Uptime:** + +Enforces minimum runtime once started using duration tracking (see [Duration Tracking](../modeling-patterns/duration-tracking.md#minimum-duration-constraints)): + +$$\label{eq:status_min_uptime} +d^\text{uptime}(t) \geq (s(t-1) - s(t)) \cdot h^\text{uptime}_\text{min} \quad \forall t > 0 +$$ + +With: + +- $d^\text{uptime}(t)$ being the consecutive uptime duration at time $t$ +- $h^\text{uptime}_\text{min}$ being the minimum required uptime + +**Behavior:** +- When shutting down at time $t$: enforces equipment was on for at least $h^\text{uptime}_\text{min}$ prior to the switch +- Prevents short cycling and frequent startups + +**Maximum Consecutive Uptime:** + +Limits continuous operation before requiring shutdown: + +$$\label{eq:status_max_uptime} +d^\text{uptime}(t) \leq h^\text{uptime}_\text{max} \quad \forall t +$$ + +**Use cases:** +- Mandatory maintenance intervals +- Process batch time limits +- Thermal cycling requirements + +--- + +### Consecutive Shutdown Hours + +**Minimum Consecutive Downtime:** + +Enforces minimum shutdown duration before restarting: + +$$\label{eq:status_min_downtime} +d^\text{downtime}(t) \geq (s(t) - s(t-1)) \cdot h^\text{downtime}_\text{min} \quad \forall t > 0 +$$ + +With: + +- $d^\text{downtime}(t)$ being the consecutive downtime duration at time $t$ +- $h^\text{downtime}_\text{min}$ being the minimum required downtime + +**Use cases:** +- Cooling periods +- Maintenance requirements +- Process stabilization + +**Maximum Consecutive Downtime:** + +Limits shutdown duration before mandatory restart: + +$$\label{eq:status_max_downtime} +d^\text{downtime}(t) 
\leq h^\text{downtime}_\text{max} \quad \forall t +$$ + +**Use cases:** +- Equipment preservation requirements +- Process stability needs +- Contractual minimum activity levels + +--- + +## Cycling Limits + +Maximum number of startups across the planning horizon: + +$$\label{eq:status_max_switches} +\sum_{t} s^\text{startup}(t) \leq n_\text{max} +$$ + +With: + +- $n_\text{max}$ being the maximum allowed number of startups + +**Use cases:** +- Preventing excessive equipment wear +- Grid stability requirements +- Operational complexity limits +- Maintenance budget constraints + +--- + +## Integration with Flow Bounds + +StatusParameters modify flow rate bounds by coupling them to the active/inactive state. + +**Without StatusParameters** (continuous operation): +$$ +P \cdot \text{rel}_\text{lower} \leq p(t) \leq P \cdot \text{rel}_\text{upper} +$$ + +**With StatusParameters** (binary operation): +$$ +s(t) \cdot P \cdot \max(\varepsilon, \text{rel}_\text{lower}) \leq p(t) \leq s(t) \cdot P \cdot \text{rel}_\text{upper} +$$ + +Using the **bounds with state** pattern from [Bounds and States](../modeling-patterns/bounds-and-states.md#bounds-with-state). + +**Behavior:** +- When $s(t) = 0$: flow is forced to zero +- When $s(t) = 1$: flow follows normal bounds + +--- + +## Complete Formulation Summary + +For equipment with StatusParameters, the complete constraint system includes: + +1. **State variable:** $s(t) \in \{0, 1\}$ +2. **Switch tracking:** $s^\text{startup}(t) - s^\text{shutdown}(t) = s(t) - s(t-1)$ +3. **Switch exclusivity:** $s^\text{startup}(t) + s^\text{shutdown}(t) \leq 1$ +4. **Duration tracking:** + + - On-duration: $d^\text{uptime}(t)$ following duration tracking pattern + - Off-duration: $d^\text{downtime}(t)$ following duration tracking pattern +5. **Minimum uptime:** $d^\text{uptime}(t) \geq (s(t-1) - s(t)) \cdot h^\text{uptime}_\text{min}$ +6. **Maximum uptime:** $d^\text{uptime}(t) \leq h^\text{uptime}_\text{max}$ +7. 
**Minimum downtime:** $d^\text{downtime}(t) \geq (s(t) - s(t-1)) \cdot h^\text{downtime}_\text{min}$ +8. **Maximum downtime:** $d^\text{downtime}(t) \leq h^\text{downtime}_\text{max}$ +9. **Total hours:** $h_\text{min} \leq \sum_t s(t) \cdot \Delta t \leq h_\text{max}$ +10. **Cycling limit:** $\sum_t s^\text{startup}(t) \leq n_\text{max}$ +11. **Flow bounds:** $s(t) \cdot P \cdot \max(\varepsilon, \text{rel}_\text{lower}) \leq p(t) \leq s(t) \cdot P \cdot \text{rel}_\text{upper}$ + +--- + +## Implementation + +**Python Class:** [`StatusParameters`][flixopt.interface.StatusParameters] + +**Key Parameters:** + +- `effects_per_startup`: Costs per startup event +- `effects_per_active_hour`: Costs per hour of operation +- `active_hours_min`, `active_hours_max`: Total runtime bounds +- `min_uptime`, `max_uptime`: Consecutive runtime bounds +- `min_downtime`, `max_downtime`: Consecutive shutdown bounds +- `startup_limit`: Maximum number of startups +- `force_startup_tracking`: Create switch variables even without limits (for tracking) + +See the [`StatusParameters`][flixopt.interface.StatusParameters] API documentation for complete parameter list and usage examples. 
+
+**Mathematical Patterns Used:**
+- [State Transitions](../modeling-patterns/state-transitions.md#binary-state-transitions) - Switch tracking
+- [Duration Tracking](../modeling-patterns/duration-tracking.md) - Consecutive time constraints
+- [Bounds with State](../modeling-patterns/bounds-and-states.md#bounds-with-state) - Flow control
+
+**Used in:**
+- [`Flow`][flixopt.elements.Flow] - Active/inactive operation for flows
+- All components supporting discrete operational states
+
+---
+
+## Examples
+
+### Power Plant with Startup Costs
+```python
+power_plant = StatusParameters(
+    effects_per_startup={'startup_cost': 25000},  # €25k per startup
+    effects_per_active_hour={'fixed_om': 125},  # €125/hour while running
+    min_uptime=8,  # Minimum 8-hour run
+    min_downtime=4,  # 4-hour cooling period
+    active_hours_max=6000,  # Annual limit
+)
+```
+
+### Batch Process with Cycling Limits
+```python
+batch_reactor = StatusParameters(
+    effects_per_startup={'setup_cost': 1500},
+    min_uptime=12,  # 12-hour minimum batch
+    max_uptime=24,  # 24-hour maximum batch
+    min_downtime=6,  # Cleaning time
+    startup_limit=200,  # Max 200 batches
+)
+```
+
+### HVAC with Cycle Prevention
+```python
+hvac = StatusParameters(
+    effects_per_startup={'compressor_wear': 0.5},
+    min_uptime=1,  # Prevent short cycling
+    min_downtime=0.5,  # 30-min minimum off
+    startup_limit=2000,  # Limit compressor starts
+)
+```
+
+### Backup Generator with Testing Requirements
+```python
+backup_gen = StatusParameters(
+    effects_per_startup={'fuel_priming': 50},  # L diesel
+    min_uptime=0.5,  # 30-min test duration
+    max_downtime=720,  # Test every 30 days
+    active_hours_min=26,  # Weekly testing requirement
+)
+```
+
+---
+
+## Notes
+
+**Time Series Boundary:** The final time period constraints for min_uptime/max_uptime and min_downtime/max_downtime are not enforced at the end of the planning horizon.
This allows optimization to end with ongoing campaigns that may be shorter/longer than specified, as they extend beyond the modeled period. diff --git a/docs/user-guide/mathematical-notation/index.md b/docs/user-guide/mathematical-notation/index.md index 27e7b7e9a..4512820f3 100644 --- a/docs/user-guide/mathematical-notation/index.md +++ b/docs/user-guide/mathematical-notation/index.md @@ -56,10 +56,10 @@ Mathematical formulations for core FlixOpt elements (corresponding to [`flixopt. Mathematical formulations for optional features (corresponding to parameters in FlixOpt classes): - [InvestParameters](features/InvestParameters.md) - Investment decision modeling -- [OnOffParameters](features/OnOffParameters.md) - Binary on/off operation +- [StatusParameters](features/StatusParameters.md) - Binary active/inactive operation - [Piecewise](features/Piecewise.md) - Piecewise linear approximations -**User API:** When you pass `invest_parameters` or `on_off_parameters` to a `Flow` or component, these formulations are applied. +**User API:** When you pass `invest_parameters` or `status_parameters` to a `Flow` or component, these formulations are applied. 
### System-Level - [Effects, Penalty & Objective](effects-penalty-objective.md) - Cost allocation and objective function @@ -97,7 +97,7 @@ Mathematical formulations for optional features (corresponding to parameters in | Concept | Documentation | Python Class | |---------|---------------|--------------| | **Binary investment** | [InvestParameters](features/InvestParameters.md) | [`InvestParameters`][flixopt.interface.InvestParameters] | -| **On/off operation** | [OnOffParameters](features/OnOffParameters.md) | [`OnOffParameters`][flixopt.interface.OnOffParameters] | +| **On/off operation** | [StatusParameters](features/StatusParameters.md) | [`StatusParameters`][flixopt.interface.StatusParameters] | | **Piecewise segments** | [Piecewise](features/Piecewise.md) | [`Piecewise`][flixopt.interface.Piecewise] | ### Modeling Patterns Cross-Reference @@ -119,5 +119,5 @@ Mathematical formulations for optional features (corresponding to parameters in | `Storage` | [Storage](elements/Storage.md) | [`Storage`][flixopt.components.Storage] | | `LinearConverter` | [LinearConverter](elements/LinearConverter.md) | [`LinearConverter`][flixopt.components.LinearConverter] | | `InvestParameters` | [InvestParameters](features/InvestParameters.md) | [`InvestParameters`][flixopt.interface.InvestParameters] | -| `OnOffParameters` | [OnOffParameters](features/OnOffParameters.md) | [`OnOffParameters`][flixopt.interface.OnOffParameters] | +| `StatusParameters` | [StatusParameters](features/StatusParameters.md) | [`StatusParameters`][flixopt.interface.StatusParameters] | | `Piecewise` | [Piecewise](features/Piecewise.md) | [`Piecewise`][flixopt.interface.Piecewise] | diff --git a/docs/user-guide/mathematical-notation/modeling-patterns/bounds-and-states.md b/docs/user-guide/mathematical-notation/modeling-patterns/bounds-and-states.md index d5821948f..18235e50d 100644 --- a/docs/user-guide/mathematical-notation/modeling-patterns/bounds-and-states.md +++ 
b/docs/user-guide/mathematical-notation/modeling-patterns/bounds-and-states.md @@ -11,6 +11,7 @@ $$\label{eq:basic_bounds} $$ With: + - $v$ being the optimization variable - $\text{lower}$ being the lower bound (constant or time-dependent) - $\text{upper}$ being the upper bound (constant or time-dependent) @@ -25,13 +26,14 @@ With: ## Bounds with State -When a variable should only be non-zero if a binary state variable is active (e.g., on/off operation, investment decisions), the bounds are controlled by the state: +When a variable should only be non-zero if a binary state variable is active (e.g., active/inactive operation, investment decisions), the bounds are controlled by the state: $$\label{eq:bounds_with_state} s \cdot \max(\varepsilon, \text{lower}) \leq v \leq s \cdot \text{upper} $$ With: + - $v$ being the optimization variable - $s \in \{0, 1\}$ being the binary state variable - $\text{lower}$ being the lower bound when active @@ -45,7 +47,7 @@ With: **Implementation:** [`BoundingPatterns.bounds_with_state()`][flixopt.modeling.BoundingPatterns.bounds_with_state] **Used in:** -- Flow rates with on/off operation (see [OnOffParameters](../features/OnOffParameters.md)) +- Flow rates with active/inactive operation (see [StatusParameters](../features/StatusParameters.md)) - Investment size decisions (see [InvestParameters](../features/InvestParameters.md)) --- @@ -59,6 +61,7 @@ v_\text{scale} \cdot \text{rel}_\text{lower} \leq v \leq v_\text{scale} \cdot \t $$ With: + - $v$ being the optimization variable (e.g., flow rate) - $v_\text{scale}$ being the scaling variable (e.g., component size) - $\text{rel}_\text{lower}$ being the relative lower bound factor (typically 0) @@ -78,7 +81,7 @@ With: ## Scaled Bounds with State -Combining scaled bounds with binary state control requires a Big-M formulation to handle both the scaling and the on/off behavior: +Combining scaled bounds with binary state control requires a Big-M formulation to handle both the scaling and 
the active/inactive behavior: $$\label{eq:scaled_bounds_with_state_1} (s - 1) \cdot M_\text{misc} + v_\text{scale} \cdot \text{rel}_\text{lower} \leq v \leq v_\text{scale} \cdot \text{rel}_\text{upper} @@ -89,6 +92,7 @@ s \cdot M_\text{lower} \leq v \leq s \cdot M_\text{upper} $$ With: + - $v$ being the optimization variable - $v_\text{scale}$ being the scaling variable - $s \in \{0, 1\}$ being the binary state variable @@ -107,8 +111,8 @@ Where $v_\text{scale,max}$ and $v_\text{scale,min}$ are the maximum and minimum **Implementation:** [`BoundingPatterns.scaled_bounds_with_state()`][flixopt.modeling.BoundingPatterns.scaled_bounds_with_state] **Used in:** -- Flow rates with on/off operation and investment sizing -- Components combining [OnOffParameters](../features/OnOffParameters.md) and [InvestParameters](../features/InvestParameters.md) +- Flow rates with active/inactive operation and investment sizing +- Components combining [StatusParameters](../features/StatusParameters.md) and [InvestParameters](../features/InvestParameters.md) --- @@ -127,6 +131,7 @@ $$\label{eq:expression_tracking_bounds} $$ With: + - $v_\text{tracker}$ being the auxiliary tracking variable - $\text{expression}$ being a linear expression of other variables - $\text{lower}, \text{upper}$ being optional bounds on the tracker @@ -149,6 +154,7 @@ $$\label{eq:mutual_exclusivity} $$ With: + - $s_i(t) \in \{0, 1\}$ being binary state variables - $\text{tolerance}$ being the maximum number of simultaneously active states (typically 1) - $t$ being the time index diff --git a/docs/user-guide/mathematical-notation/modeling-patterns/duration-tracking.md b/docs/user-guide/mathematical-notation/modeling-patterns/duration-tracking.md index 5d430d28c..2d6f46ed1 100644 --- a/docs/user-guide/mathematical-notation/modeling-patterns/duration-tracking.md +++ b/docs/user-guide/mathematical-notation/modeling-patterns/duration-tracking.md @@ -15,6 +15,7 @@ d(t) \leq s(t) \cdot M \quad \forall t $$ With: + - 
$d(t)$ being the duration variable (continuous, non-negative) - $s(t) \in \{0, 1\}$ being the binary state variable - $M$ being a sufficiently large constant (big-M) @@ -38,6 +39,7 @@ d(t+1) \geq d(t) + \Delta d(t) + (s(t+1) - 1) \cdot M \quad \forall t $$ With: + - $\Delta d(t)$ being the duration increment for time step $t$ (typically $\Delta t_i$ from the time series) - $M$ being a sufficiently large constant @@ -56,6 +58,7 @@ d(0) = (\Delta d(0) + d_\text{prev}) \cdot s(0) $$ With: + - $d_\text{prev}$ being the duration from before the optimization period - $\Delta d(0)$ being the duration increment for the first time step @@ -89,6 +92,7 @@ d(t) \geq (s(t-1) - s(t)) \cdot d_\text{min}(t-1) \quad \forall t > 0 $$ With: + - $d_\text{min}(t)$ being the required minimum duration at time $t$ **Behavior:** @@ -116,7 +120,7 @@ Ensuring equipment runs for a minimum duration once started: # State: 1 when running, 0 when off # Require at least 2 hours of operation duration = modeling.consecutive_duration_tracking( - state_variable=on_state, + state=on_state, duration_per_step=time_step_hours, minimum_duration=2.0 ) @@ -129,7 +133,7 @@ Tracking time since startup for gradual ramp-up constraints: ```python # Track startup duration startup_duration = modeling.consecutive_duration_tracking( - state_variable=on_state, + state=on_state, duration_per_step=time_step_hours ) # Constrain output based on startup duration @@ -143,7 +147,7 @@ Tracking time in a state before allowing transitions: ```python # Track maintenance duration maintenance_duration = modeling.consecutive_duration_tracking( - state_variable=maintenance_state, + state=maintenance_state, duration_per_step=time_step_hours, minimum_duration=scheduled_maintenance_hours ) @@ -154,6 +158,7 @@ maintenance_duration = modeling.consecutive_duration_tracking( ## Used In This pattern is used in: -- [`OnOffParameters`](../features/OnOffParameters.md) - Minimum on/off times + +- 
[`StatusParameters`](../features/StatusParameters.md) - Minimum active/inactive times - Operating mode constraints with minimum durations - Startup/shutdown sequence modeling diff --git a/docs/user-guide/mathematical-notation/modeling-patterns/index.md b/docs/user-guide/mathematical-notation/modeling-patterns/index.md index 15ff8dbd2..ab347eb39 100644 --- a/docs/user-guide/mathematical-notation/modeling-patterns/index.md +++ b/docs/user-guide/mathematical-notation/modeling-patterns/index.md @@ -17,7 +17,7 @@ The modeling patterns are organized into three categories: These patterns define how optimization variables are constrained within bounds: - **Basic Bounds** - Simple upper and lower bounds on variables -- **Bounds with State** - Binary-controlled bounds (on/off states) +- **Bounds with State** - Binary-controlled bounds (active/inactive states) - **Scaled Bounds** - Bounds dependent on another variable (e.g., size) - **Scaled Bounds with State** - Combination of scaling and binary control @@ -43,7 +43,7 @@ These patterns are used throughout FlixOpt components: - [`Flow`][flixopt.elements.Flow] uses **scaled bounds with state** for flow rate constraints - [`Storage`][flixopt.components.Storage] uses **basic bounds** for charge state -- [`OnOffParameters`](../features/OnOffParameters.md) uses **state transitions** for startup/shutdown +- [`StatusParameters`](../features/StatusParameters.md) uses **state transitions** for startup/shutdown - [`InvestParameters`](../features/InvestParameters.md) uses **bounds with state** for investment decisions ## Implementation diff --git a/docs/user-guide/mathematical-notation/modeling-patterns/state-transitions.md b/docs/user-guide/mathematical-notation/modeling-patterns/state-transitions.md index dc75a8008..cf6cfe736 100644 --- a/docs/user-guide/mathematical-notation/modeling-patterns/state-transitions.md +++ b/docs/user-guide/mathematical-notation/modeling-patterns/state-transitions.md @@ -9,6 +9,7 @@ For a binary state 
variable $s(t) \in \{0, 1\}$, state transitions track when th ### Switch Variables Two binary variables track the transitions: + - $s^\text{on}(t) \in \{0, 1\}$: equals 1 when switching from off to on - $s^\text{off}(t) \in \{0, 1\}$: equals 1 when switching from on to off @@ -25,6 +26,7 @@ s^\text{on}(0) - s^\text{off}(0) = s(0) - s_\text{prev} $$ With: + - $s(t)$ being the binary state variable - $s_\text{prev}$ being the state before the optimization period - $s^\text{on}(t), s^\text{off}(t)$ being the switch variables @@ -45,8 +47,9 @@ s^\text{on}(t) + s^\text{off}(t) \leq 1 \quad \forall t $$ This ensures: + - At most one switch event per time step -- No simultaneous on/off switching +- No simultaneous active/inactive switching --- @@ -80,6 +83,7 @@ $$\label{eq:continuous_transition_initial} $$ With: + - $v(t)$ being the continuous variable - $v_\text{prev}$ being the value before the optimization period - $\Delta v^\text{max}$ being the maximum allowed change @@ -110,6 +114,7 @@ $$\label{eq:level_evolution} $$ With: + - $\ell(t)$ being the level variable - $\ell_\text{init}$ being the initial level - $\ell^\text{inc}(t)$ being the increase in level at time $t$ (non-negative) @@ -130,6 +135,7 @@ $$\label{eq:decrease_bound} $$ With: + - $\Delta \ell^\text{max}$ being the maximum change per time step - $b^\text{inc}(t), b^\text{dec}(t) \in \{0, 1\}$ being binary control variables @@ -144,6 +150,7 @@ b^\text{inc}(t) + b^\text{dec}(t) \leq 1 \quad \forall t $$ This ensures: + - Level can only increase OR decrease (or stay constant) in each time step - No simultaneous contradictory changes @@ -174,14 +181,14 @@ Track startup and shutdown events to apply costs: ```python # Create switch variables -switch_on, switch_off = modeling.state_transition_bounds( - state_variable=on_state, +startup, shutdown = modeling.state_transition_bounds( + state=on_state, previous_state=previous_on_state ) # Apply costs to switches -startup_cost = switch_on * startup_cost_per_event 
-shutdown_cost = switch_off * shutdown_cost_per_event +startup_cost = startup * startup_cost_per_event +shutdown_cost = shutdown * shutdown_cost_per_event ``` ### Limited Switching @@ -190,13 +197,13 @@ Restrict the number of state changes: ```python # Track all switches -switch_on, switch_off = modeling.state_transition_bounds( - state_variable=on_state +startup, shutdown = modeling.state_transition_bounds( + state=on_state ) # Limit total switches model.add_constraint( - (switch_on + switch_off).sum() <= max_switches + (startup + shutdown).sum() <= max_switches ) ``` @@ -221,7 +228,8 @@ model.add_constraint(increase.sum() <= max_total_expansion) ## Used In These patterns are used in: -- [`OnOffParameters`](../features/OnOffParameters.md) - Startup/shutdown tracking and costs + +- [`StatusParameters`](../features/StatusParameters.md) - Startup/shutdown tracking and costs - Operating mode switching with transition costs - Investment planning with staged capacity additions - Inventory management with controlled stock changes diff --git a/docs/user-guide/recipes/index.md b/docs/user-guide/recipes/index.md index 8ac7d1812..0317b2c70 100644 --- a/docs/user-guide/recipes/index.md +++ b/docs/user-guide/recipes/index.md @@ -28,7 +28,7 @@ Unlike full examples, recipes will be focused snippets showing a single concept. 
- **Data Manipulation** - Common xarray operations for parameterization and analysis - **Investment Optimization** - Size optimization strategies - **Renewable Integration** - Solar, wind capacity optimization -- **On/Off Constraints** - Minimum runtime, startup costs +- **Status Constraints** - Minimum runtime, startup costs - **Large-Scale Problems** - Segmented and aggregated calculations - **Custom Constraints** - Extend models with linopy - **Domain-Specific Patterns** - District heating, microgrids, industrial processes diff --git a/examples/02_Complex/complex_example.py b/examples/02_Complex/complex_example.py index 2913f643f..b86c0e9de 100644 --- a/examples/02_Complex/complex_example.py +++ b/examples/02_Complex/complex_example.py @@ -47,12 +47,12 @@ # --- Define Components --- # 1. Define Boiler Component - # A gas boiler that converts fuel into thermal output, with investment and on-off parameters + # A gas boiler that converts fuel into thermal output, with investment and status parameters Gaskessel = fx.linear_converters.Boiler( 'Kessel', thermal_efficiency=0.5, # Efficiency ratio - on_off_parameters=fx.OnOffParameters( - effects_per_running_hour={Costs.label: 0, CO2.label: 1000} + status_parameters=fx.StatusParameters( + effects_per_active_hour={Costs.label: 0, CO2.label: 1000} ), # CO2 emissions per hour thermal_flow=fx.Flow( label='Q_th', # Thermal output @@ -69,14 +69,14 @@ relative_maximum=1, # Maximum part load previous_flow_rate=50, # Previous flow rate flow_hours_max=1e6, # Total energy flow limit - on_off_parameters=fx.OnOffParameters( - on_hours_min=0, # Minimum operating hours - on_hours_max=1000, # Maximum operating hours - consecutive_on_hours_max=10, # Max consecutive operating hours - consecutive_on_hours_min=np.array([1, 1, 1, 1, 1, 2, 2, 2, 2]), # min consecutive operation hours - consecutive_off_hours_max=10, # Max consecutive off hours - effects_per_switch_on=0.01, # Cost per switch-on - switch_on_max=1000, # Max number of starts 
+ status_parameters=fx.StatusParameters( + active_hours_min=0, # Minimum operating hours + active_hours_max=1000, # Maximum operating hours + max_uptime=10, # Max consecutive operating hours + min_uptime=np.array([1, 1, 1, 1, 1, 2, 2, 2, 2]), # min consecutive operation hours + max_downtime=10, # Max consecutive inactive hours + effects_per_startup={Costs.label: 0.01}, # Cost per startup + startup_limit=1000, # Max number of starts ), ), fuel_flow=fx.Flow(label='Q_fu', bus='Gas', size=200), @@ -88,7 +88,7 @@ 'BHKW2', thermal_efficiency=0.5, electrical_efficiency=0.4, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + status_parameters=fx.StatusParameters(effects_per_startup={Costs.label: 0.01}), electrical_flow=fx.Flow('P_el', bus='Strom', size=60, relative_minimum=5 / 60), thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=1e3), fuel_flow=fx.Flow('Q_fu', bus='Gas', size=1e3, previous_flow_rate=20), # The CHP was ON previously @@ -112,7 +112,7 @@ inputs=[Q_fu], outputs=[P_el, Q_th], piecewise_conversion=piecewise_conversion, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + status_parameters=fx.StatusParameters(effects_per_startup={Costs.label: 0.01}), ) # 4. 
Define Storage Component diff --git a/examples/02_Complex/complex_example_results.py b/examples/02_Complex/complex_example_results.py index 7f1123a26..c4e9bb4f2 100644 --- a/examples/02_Complex/complex_example_results.py +++ b/examples/02_Complex/complex_example_results.py @@ -29,8 +29,8 @@ bus.plot_node_balance(show=False, save=f'results/{bus.label}--balance.html') # --- Plotting internal variables manually --- - results.plot_heatmap('BHKW2(Q_th)|on') - results.plot_heatmap('Kessel(Q_th)|on') + results.plot_heatmap('BHKW2(Q_th)|status') + results.plot_heatmap('Kessel(Q_th)|status') # Dataframes from results: fw_bus = results['Fernwärme'].node_balance().to_dataframe() diff --git a/examples/03_Optimization_modes/example_optimization_modes.py b/examples/03_Optimization_modes/example_optimization_modes.py index d3ae566e4..009c008d9 100644 --- a/examples/03_Optimization_modes/example_optimization_modes.py +++ b/examples/03_Optimization_modes/example_optimization_modes.py @@ -91,7 +91,7 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset: size=95, relative_minimum=12 / 95, previous_flow_rate=20, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=1000), + status_parameters=fx.StatusParameters(effects_per_startup=1000), ), ) @@ -100,7 +100,7 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset: 'BHKW2', thermal_efficiency=0.58, electrical_efficiency=0.22, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=24000), + status_parameters=fx.StatusParameters(effects_per_startup=24000), electrical_flow=fx.Flow('P_el', bus='Strom', size=200), thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=200), fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=288, relative_minimum=87 / 288, previous_flow_rate=100), diff --git a/examples/04_Scenarios/scenario_example.py b/examples/04_Scenarios/scenario_example.py index 6ae01c4f0..672df5c7f 100644 --- a/examples/04_Scenarios/scenario_example.py +++ b/examples/04_Scenarios/scenario_example.py 
@@ -123,7 +123,7 @@ size=50, relative_minimum=0.1, relative_maximum=1, - on_off_parameters=fx.OnOffParameters(), + status_parameters=fx.StatusParameters(), ), fuel_flow=fx.Flow(label='Q_fu', bus='Gas'), ) @@ -135,7 +135,7 @@ thermal_efficiency=0.48, # Realistic thermal efficiency (48%) electrical_efficiency=0.40, # Realistic electrical efficiency (40%) electrical_flow=fx.Flow( - 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, on_off_parameters=fx.OnOffParameters() + 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters() ), thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), fuel_flow=fx.Flow('Q_fu', bus='Gas'), diff --git a/examples/05_Two-stage-optimization/two_stage_optimization.py b/examples/05_Two-stage-optimization/two_stage_optimization.py index d8f4e87fe..9e102c44f 100644 --- a/examples/05_Two-stage-optimization/two_stage_optimization.py +++ b/examples/05_Two-stage-optimization/two_stage_optimization.py @@ -57,16 +57,14 @@ ), relative_minimum=0.2, previous_flow_rate=20, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=300), + status_parameters=fx.StatusParameters(effects_per_startup=300), ), ), fx.linear_converters.CHP( 'BHKW2', thermal_efficiency=0.58, electrical_efficiency=0.22, - on_off_parameters=fx.OnOffParameters( - effects_per_switch_on=1_000, consecutive_on_hours_min=10, consecutive_off_hours_min=10 - ), + status_parameters=fx.StatusParameters(effects_per_startup=1_000, min_uptime=10, min_downtime=10), electrical_flow=fx.Flow('P_el', bus='Strom'), thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), fuel_flow=fx.Flow( diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 3941cb491..0f8fc73e2 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -31,7 +31,7 @@ from .effects import PENALTY_EFFECT_LABEL, Effect from .elements import Bus, Flow from .flow_system import FlowSystem -from .interface import InvestParameters, OnOffParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects 
+from .interface import InvestParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects, StatusParameters # Import new Optimization classes from .optimization import ClusteredOptimization, Optimization, SegmentedOptimization @@ -60,7 +60,7 @@ 'AggregatedCalculation', 'SegmentedCalculation', 'InvestParameters', - 'OnOffParameters', + 'StatusParameters', 'Piece', 'Piecewise', 'PiecewiseConversion', diff --git a/flixopt/components.py b/flixopt/components.py index 07bc5f204..2d04586cc 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -14,7 +14,7 @@ from .core import PlausibilityError from .elements import Component, ComponentModel, Flow from .features import InvestmentModel, PiecewiseModel -from .interface import InvestParameters, OnOffParameters, PiecewiseConversion +from .interface import InvestParameters, PiecewiseConversion, StatusParameters from .modeling import BoundingPatterns from .structure import FlowSystemModel, register_class_for_io @@ -48,9 +48,9 @@ class LinearConverter(Component): label: The label of the Element. Used to identify it in the FlowSystem. inputs: list of input Flows that feed into the converter. outputs: list of output Flows that are produced by the converter. - on_off_parameters: Information about on and off state of LinearConverter. - Component is On/Off if all connected Flows are On/Off. This induces an - On-Variable (binary) in all Flows! If possible, use OnOffParameters in a + status_parameters: Information about active and inactive state of LinearConverter. + Component is active/inactive if all connected Flows are active/inactive. This induces a + status variable (binary) in all Flows! If possible, use StatusParameters in a single Flow instead to keep the number of binary variables low. conversion_factors: Linear relationships between flows expressed as a list of dictionaries. 
Each dictionary maps flow labels to their coefficients in one @@ -167,12 +167,12 @@ def __init__( label: str, inputs: list[Flow], outputs: list[Flow], - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, conversion_factors: list[dict[str, Numeric_TPS]] | None = None, piecewise_conversion: PiecewiseConversion | None = None, meta_data: dict | None = None, ): - super().__init__(label, inputs, outputs, on_off_parameters, meta_data=meta_data) + super().__init__(label, inputs, outputs, status_parameters, meta_data=meta_data) self.conversion_factors = conversion_factors or [] self.piecewise_conversion = piecewise_conversion @@ -573,8 +573,8 @@ class Transmission(Component): relative_losses: Proportional losses as fraction of throughput (e.g., 0.02 for 2% loss). Applied as: output = input × (1 - relative_losses) absolute_losses: Fixed losses that occur when transmission is active. - Automatically creates binary variables for on/off states. - on_off_parameters: Parameters defining binary operation constraints and costs. + Automatically creates binary variables for active/inactive states. + status_parameters: Parameters defining binary operation constraints and costs. prevent_simultaneous_flows_in_both_directions: If True, prevents simultaneous flow in both directions. Increases binary variables but reflects physical reality for most transmission systems. Default is True. 
@@ -629,7 +629,7 @@ class Transmission(Component): ) ``` - Material conveyor with on/off operation: + Material conveyor with active/inactive status: ```python conveyor_belt = Transmission( @@ -637,10 +637,10 @@ class Transmission(Component): in1=loading_station, out1=unloading_station, absolute_losses=25, # 25 kW motor power when running - on_off_parameters=OnOffParameters( - effects_per_switch_on={'maintenance': 0.1}, - consecutive_on_hours_min=2, # Minimum 2-hour operation - switch_on_max=10, # Maximum 10 starts per day + status_parameters=StatusParameters( + effects_per_startup={'maintenance': 0.1}, + min_uptime=2, # Minimum 2-hour operation + startup_limit=10, # Maximum 10 starts per period ), ) ``` @@ -654,7 +654,7 @@ class Transmission(Component): When using InvestParameters on in1, the capacity automatically applies to in2 to maintain consistent bidirectional capacity without additional investment variables. - Absolute losses force the creation of binary on/off variables, which increases + Absolute losses force the creation of binary active/inactive variables, which increases computational complexity but enables realistic modeling of equipment with standby power consumption. 
@@ -671,7 +671,7 @@ def __init__( out2: Flow | None = None, relative_losses: Numeric_TPS | None = None, absolute_losses: Numeric_TPS | None = None, - on_off_parameters: OnOffParameters = None, + status_parameters: StatusParameters | None = None, prevent_simultaneous_flows_in_both_directions: bool = True, balanced: bool = False, meta_data: dict | None = None, @@ -680,7 +680,7 @@ def __init__( label, inputs=[flow for flow in (in1, in2) if flow is not None], outputs=[flow for flow in (out1, out2) if flow is not None], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, prevent_simultaneous_flows=None if in2 is None or prevent_simultaneous_flows_in_both_directions is False else [in1, in2], @@ -739,8 +739,8 @@ class TransmissionModel(ComponentModel): def __init__(self, model: FlowSystemModel, element: Transmission): if (element.absolute_losses is not None) and np.any(element.absolute_losses != 0): for flow in element.inputs + element.outputs: - if flow.on_off_parameters is None: - flow.on_off_parameters = OnOffParameters() + if flow.status_parameters is None: + flow.status_parameters = StatusParameters() super().__init__(model, element) @@ -772,8 +772,8 @@ def create_transmission_equation(self, name: str, in_flow: Flow, out_flow: Flow) short_name=name, ) - if self.element.absolute_losses is not None: - con_transmission.lhs += in_flow.submodel.on_off.on * self.element.absolute_losses + if (self.element.absolute_losses is not None) and np.any(self.element.absolute_losses != 0): + con_transmission.lhs += in_flow.submodel.status.status * self.element.absolute_losses return con_transmission @@ -807,7 +807,7 @@ def _do_modeling(self): ) else: - # TODO: Improve Inclusion of OnOffParameters. Instead of creating a Binary in every flow, the binary could only be part of the Piece itself + # TODO: Improve Inclusion of StatusParameters. 
Instead of creating a Binary in every flow, the binary could only be part of the Piece itself piecewise_conversion = { self.element.flows[flow].submodel.flow_rate.name: piecewise for flow, piecewise in self.element.piecewise_conversion.items() @@ -819,7 +819,7 @@ def _do_modeling(self): label_of_element=self.label_of_element, label_of_model=f'{self.label_of_element}', piecewise_variables=piecewise_conversion, - zero_point=self.on_off.on if self.on_off is not None else False, + zero_point=self.status.status if self.status is not None else False, dims=('time', 'period', 'scenario'), ), short_name='PiecewiseConversion', @@ -978,7 +978,7 @@ def _investment(self) -> InvestmentModel | None: @property def investment(self) -> InvestmentModel | None: - """OnOff feature""" + """Investment feature""" if 'investment' not in self.submodels: return None return self.submodels['investment'] diff --git a/flixopt/elements.py b/flixopt/elements.py index 17730bc98..f12dae4c4 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -14,8 +14,8 @@ from . import io as fx_io from .config import CONFIG, DEPRECATION_REMOVAL_VERSION from .core import PlausibilityError -from .features import InvestmentModel, OnOffModel -from .interface import InvestParameters, OnOffParameters +from .features import InvestmentModel, StatusModel +from .interface import InvestParameters, StatusParameters from .modeling import BoundingPatterns, ModelingPrimitives, ModelingUtilitiesAbstract from .structure import ( Element, @@ -58,9 +58,9 @@ class Component(Element): energy/material consumption by the component. outputs: list of output Flows leaving the component. These represent energy/material production by the component. - on_off_parameters: Defines binary operation constraints and costs when the - component has discrete on/off states. Creates binary variables for all - connected Flows. 
For better performance, prefer defining OnOffParameters + status_parameters: Defines binary operation constraints and costs when the + component has discrete active/inactive states. Creates binary variables for all + connected Flows. For better performance, prefer defining StatusParameters on individual Flows when possible. prevent_simultaneous_flows: list of Flows that cannot be active simultaneously. Creates binary variables to enforce mutual exclusivity. Use sparingly as @@ -70,13 +70,13 @@ class Component(Element): Note: Component operational state is determined by its connected Flows: - - Component is "on" if ANY of its Flows is active (flow_rate > 0) - - Component is "off" only when ALL Flows are inactive (flow_rate = 0) + - Component is "active" if ANY of its Flows is active (flow_rate > 0) + - Component is "inactive" only when ALL Flows are inactive (flow_rate = 0) Binary variables and constraints: - - on_off_parameters creates binary variables for ALL connected Flows + - status_parameters creates binary variables for ALL connected Flows - prevent_simultaneous_flows creates binary variables for specified Flows - - For better computational performance, prefer Flow-level OnOffParameters + - For better computational performance, prefer Flow-level StatusParameters Component is an abstract base class. 
In practice, use specialized subclasses: - LinearConverter: Linear input/output relationships @@ -91,14 +91,14 @@ def __init__( label: str, inputs: list[Flow] | None = None, outputs: list[Flow] | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, prevent_simultaneous_flows: list[Flow] | None = None, meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) self.inputs: list[Flow] = inputs or [] self.outputs: list[Flow] = outputs or [] - self.on_off_parameters = on_off_parameters + self.status_parameters = status_parameters self.prevent_simultaneous_flows: list[Flow] = prevent_simultaneous_flows or [] self._check_unique_flow_labels() @@ -114,15 +114,15 @@ def create_model(self, model: FlowSystemModel) -> ComponentModel: def _set_flow_system(self, flow_system) -> None: """Propagate flow_system reference to nested Interface objects and flows.""" super()._set_flow_system(flow_system) - if self.on_off_parameters is not None: - self.on_off_parameters._set_flow_system(flow_system) + if self.status_parameters is not None: + self.status_parameters._set_flow_system(flow_system) for flow in self.inputs + self.outputs: flow._set_flow_system(flow_system) def transform_data(self, name_prefix: str = '') -> None: prefix = '|'.join(filter(None, [name_prefix, self.label_full])) - if self.on_off_parameters is not None: - self.on_off_parameters.transform_data(prefix) + if self.status_parameters is not None: + self.status_parameters.transform_data(prefix) for flow in self.inputs + self.outputs: flow.transform_data() # Flow doesnt need the name_prefix @@ -314,7 +314,7 @@ class Flow(Element): between a Bus and a Component in a specific direction. The flow rate is the primary optimization variable, with constraints and costs defined through various parameters. Flows can have fixed or variable sizes, operational - constraints, and complex on/off behavior. + constraints, and complex active/inactive behavior. 
Key Concepts: **Flow Rate**: The instantaneous rate of energy/material transfer (optimization variable) [kW, m³/h, kg/h] @@ -324,7 +324,7 @@ class Flow(Element): Integration with Parameter Classes: - **InvestParameters**: Used for `size` when flow Size is an investment decision - - **OnOffParameters**: Used for `on_off_parameters` when flow has discrete states + - **StatusParameters**: Used for `status_parameters` when flow has discrete states Mathematical Formulation: See the complete mathematical model in the documentation: @@ -340,7 +340,7 @@ class Flow(Element): load_factor_max: Maximum average utilization (0-1). Default: 1. effects_per_flow_hour: Operational costs/impacts per flow-hour. Dict mapping effect names to values (e.g., {'cost': 45, 'CO2': 0.8}). - on_off_parameters: Binary operation constraints (OnOffParameters). Default: None. + status_parameters: Binary operation constraints (StatusParameters). Default: None. flow_hours_max: Maximum cumulative flow-hours per period. Alternative to load_factor_max. flow_hours_min: Minimum cumulative flow-hours per period. Alternative to load_factor_min. flow_hours_max_over_periods: Maximum weighted sum of flow-hours across ALL periods. @@ -349,7 +349,7 @@ class Flow(Element): Weighted by FlowSystem period weights. fixed_relative_profile: Predetermined pattern as fraction of size. Flow rate = size × fixed_relative_profile(t). - previous_flow_rate: Initial flow state for on/off dynamics. Default: None (off). + previous_flow_rate: Initial flow state for active/inactive status at model start. Default: None (inactive). meta_data: Additional info stored in results. Python native types only. 
Examples: @@ -386,13 +386,13 @@ class Flow(Element): label='heat_output', bus='heating_network', size=50, # 50 kW thermal - relative_minimum=0.3, # Minimum 15 kW output when on + relative_minimum=0.3, # Minimum 15 kW output when active effects_per_flow_hour={'electricity_cost': 25, 'maintenance': 2}, - on_off_parameters=OnOffParameters( - effects_per_switch_on={'startup_cost': 100, 'wear': 0.1}, - consecutive_on_hours_min=2, # Must run at least 2 hours - consecutive_off_hours_min=1, # Must stay off at least 1 hour - switch_on_max=200, # Maximum 200 starts per period + status_parameters=StatusParameters( + effects_per_startup={'startup_cost': 100, 'wear': 0.1}, + min_uptime=2, # Must run at least 2 hours + min_downtime=1, # Must stay inactive at least 1 hour + startup_limit=200, # Maximum 200 starts per period ), ) ``` @@ -428,7 +428,7 @@ class Flow(Element): limits across all periods. **Relative Bounds**: Set `relative_minimum > 0` only when equipment cannot - operate below that level. Use `on_off_parameters` for discrete on/off behavior. + operate below that level. Use `status_parameters` for discrete active/inactive behavior. **Fixed Profiles**: Use `fixed_relative_profile` for known exact patterns, `relative_maximum` for upper bounds on optimization variables. 
@@ -454,7 +454,7 @@ def __init__( relative_minimum: Numeric_TPS = 0, relative_maximum: Numeric_TPS = 1, effects_per_flow_hour: Effect_TPS | Numeric_TPS | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, flow_hours_max: Numeric_PS | None = None, flow_hours_min: Numeric_PS | None = None, flow_hours_max_over_periods: Numeric_S | None = None, @@ -479,7 +479,7 @@ def __init__( self.flow_hours_min = flow_hours_min self.flow_hours_max_over_periods = flow_hours_max_over_periods self.flow_hours_min_over_periods = flow_hours_min_over_periods - self.on_off_parameters = on_off_parameters + self.status_parameters = status_parameters self.previous_flow_rate = previous_flow_rate @@ -507,8 +507,8 @@ def create_model(self, model: FlowSystemModel) -> FlowModel: def _set_flow_system(self, flow_system) -> None: """Propagate flow_system reference to nested Interface objects.""" super()._set_flow_system(flow_system) - if self.on_off_parameters is not None: - self.on_off_parameters._set_flow_system(flow_system) + if self.status_parameters is not None: + self.status_parameters._set_flow_system(flow_system) if isinstance(self.size, Interface): self.size._set_flow_system(flow_system) @@ -537,8 +537,8 @@ def transform_data(self, name_prefix: str = '') -> None: f'{prefix}|load_factor_min', self.load_factor_min, dims=['period', 'scenario'] ) - if self.on_off_parameters is not None: - self.on_off_parameters.transform_data(prefix) + if self.status_parameters is not None: + self.status_parameters.transform_data(prefix) if isinstance(self.size, InvestParameters): self.size.transform_data(prefix) else: @@ -558,17 +558,17 @@ def _plausibility_checks(self) -> None: f'the resulting flow_rate will be very high. To fix this, assign a size to the Flow {self}.' 
) - if self.fixed_relative_profile is not None and self.on_off_parameters is not None: + if self.fixed_relative_profile is not None and self.status_parameters is not None: logger.warning( - f'Flow {self.label_full} has both a fixed_relative_profile and an on_off_parameters.' - f'This will allow the flow to be switched on and off, effectively differing from the fixed_flow_rate.' + f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters.' + f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.' ) - if np.any(self.relative_minimum > 0) and self.on_off_parameters is None: + if np.any(self.relative_minimum > 0) and self.status_parameters is None: logger.warning( - f'Flow {self.label_full} has a relative_minimum of {self.relative_minimum} and no on_off_parameters. ' - f'This prevents the Flow from switching off (flow_rate = 0). ' - f'Consider using on_off_parameters to allow the Flow to be switched on and off.' + f'Flow {self.label_full} has a relative_minimum of {self.relative_minimum} and no status_parameters. ' + f'This prevents the Flow from switching inactive (flow_rate = 0). ' + f'Consider using status_parameters to allow the Flow to be switched active and inactive.' 
) if self.previous_flow_rate is not None: @@ -666,18 +666,18 @@ def _do_modeling(self): # Effects self._create_shares() - def _create_on_off_model(self): - on = self.add_variables(binary=True, short_name='on', coords=self._model.get_coords()) + def _create_status_model(self): + status = self.add_variables(binary=True, short_name='status', coords=self._model.get_coords()) self.add_submodels( - OnOffModel( + StatusModel( model=self._model, label_of_element=self.label_of_element, - parameters=self.element.on_off_parameters, - on_variable=on, - previous_states=self.previous_states, + parameters=self.element.status_parameters, + status=status, + previous_status=self.previous_status, label_of_model=self.label_of_element, ), - short_name='on_off', + short_name='status', ) def _create_investment_model(self): @@ -693,23 +693,23 @@ def _create_investment_model(self): def _constraint_flow_rate(self): """Create bounding constraints for flow_rate (models already created in _create_variables)""" - if not self.with_investment and not self.with_on_off: + if not self.with_investment and not self.with_status: # Most basic case. 
Already covered by direct variable bounds pass - elif self.with_on_off and not self.with_investment: - # OnOff, but no Investment - self._create_on_off_model() + elif self.with_status and not self.with_investment: + # Status, but no Investment + self._create_status_model() bounds = self.relative_flow_rate_bounds BoundingPatterns.bounds_with_state( self, variable=self.flow_rate, bounds=(bounds[0] * self.element.size, bounds[1] * self.element.size), - variable_state=self.on_off.on, + state=self.status.status, ) - elif self.with_investment and not self.with_on_off: - # Investment, but no OnOff + elif self.with_investment and not self.with_status: + # Investment, but no Status self._create_investment_model() BoundingPatterns.scaled_bounds( self, @@ -718,10 +718,10 @@ def _constraint_flow_rate(self): relative_bounds=self.relative_flow_rate_bounds, ) - elif self.with_investment and self.with_on_off: - # Investment and OnOff + elif self.with_investment and self.with_status: + # Investment and Status self._create_investment_model() - self._create_on_off_model() + self._create_status_model() BoundingPatterns.scaled_bounds_with_state( model=self, @@ -729,14 +729,14 @@ def _constraint_flow_rate(self): scaling_variable=self._investment.size, relative_bounds=self.relative_flow_rate_bounds, scaling_bounds=(self.element.size.minimum_or_fixed_size, self.element.size.maximum_or_fixed_size), - variable_state=self.on_off.on, + state=self.status.status, ) else: raise Exception('Not valid') @property - def with_on_off(self) -> bool: - return self.element.on_off_parameters is not None + def with_status(self) -> bool: + return self.element.status_parameters is not None @property def with_investment(self) -> bool: @@ -809,9 +809,9 @@ def absolute_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: lb_relative, ub_relative = self.relative_flow_rate_bounds lb = 0 - if not self.with_on_off: + if not self.with_status: if not self.with_investment: - # Basic case without investment and 
without OnOff + # Basic case without investment and without Status lb = lb_relative * self.element.size elif self.with_investment and self.element.size.mandatory: # With mandatory Investment @@ -825,11 +825,11 @@ def absolute_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: return lb, ub @property - def on_off(self) -> OnOffModel | None: - """OnOff feature""" - if 'on_off' not in self.submodels: + def status(self) -> StatusModel | None: + """Status feature""" + if 'status' not in self.submodels: return None - return self.submodels['on_off'] + return self.submodels['status'] @property def _investment(self) -> InvestmentModel | None: @@ -838,14 +838,14 @@ def _investment(self) -> InvestmentModel | None: @property def investment(self) -> InvestmentModel | None: - """OnOff feature""" + """Investment feature""" if 'investment' not in self.submodels: return None return self.submodels['investment'] @property - def previous_states(self) -> xr.DataArray | None: - """Previous states of the flow rate""" + def previous_status(self) -> xr.DataArray | None: + """Previous status of the flow rate""" # TODO: This would be nicer to handle in the Flow itself, and allow DataArrays as well. 
previous_flow_rate = self.element.previous_flow_rate if previous_flow_rate is None: @@ -923,7 +923,7 @@ class ComponentModel(ElementModel): element: Component # Type hint def __init__(self, model: FlowSystemModel, element: Component): - self.on_off: OnOffModel | None = None + self.status: StatusModel | None = None super().__init__(model, element) def _do_modeling(self): @@ -932,51 +932,52 @@ def _do_modeling(self): all_flows = self.element.inputs + self.element.outputs - # Set on_off_parameters on flows if needed - if self.element.on_off_parameters: + # Set status_parameters on flows if needed + if self.element.status_parameters: for flow in all_flows: - if flow.on_off_parameters is None: - flow.on_off_parameters = OnOffParameters() + if flow.status_parameters is None: + flow.status_parameters = StatusParameters() if self.element.prevent_simultaneous_flows: for flow in self.element.prevent_simultaneous_flows: - if flow.on_off_parameters is None: - flow.on_off_parameters = OnOffParameters() + if flow.status_parameters is None: + flow.status_parameters = StatusParameters() # Create FlowModels (which creates their variables and constraints) for flow in all_flows: self.add_submodels(flow.create_model(self._model), short_name=flow.label) - # Create component on variable and OnOffModel if needed - if self.element.on_off_parameters: - on = self.add_variables(binary=True, short_name='on', coords=self._model.get_coords()) + # Create component status variable and StatusModel if needed + if self.element.status_parameters: + status = self.add_variables(binary=True, short_name='status', coords=self._model.get_coords()) if len(all_flows) == 1: - self.add_constraints(on == all_flows[0].submodel.on_off.on, short_name='on') + self.add_constraints(status == all_flows[0].submodel.status.status, short_name='status') else: - flow_ons = [flow.submodel.on_off.on for flow in all_flows] + flow_statuses = [flow.submodel.status.status for flow in all_flows] # TODO: Is the EPSILON even 
necessary? - self.add_constraints(on <= sum(flow_ons) + CONFIG.Modeling.epsilon, short_name='on|ub') + self.add_constraints(status <= sum(flow_statuses) + CONFIG.Modeling.epsilon, short_name='status|ub') self.add_constraints( - on >= sum(flow_ons) / (len(flow_ons) + CONFIG.Modeling.epsilon), short_name='on|lb' + status >= sum(flow_statuses) / (len(flow_statuses) + CONFIG.Modeling.epsilon), + short_name='status|lb', ) - self.on_off = self.add_submodels( - OnOffModel( + self.status = self.add_submodels( + StatusModel( model=self._model, label_of_element=self.label_of_element, - parameters=self.element.on_off_parameters, - on_variable=on, + parameters=self.element.status_parameters, + status=status, label_of_model=self.label_of_element, - previous_states=self.previous_states, + previous_status=self.previous_status, ), - short_name='on_off', + short_name='status', ) if self.element.prevent_simultaneous_flows: # Simultanious Useage --> Only One FLow is On at a time, but needs a Binary for every flow ModelingPrimitives.mutual_exclusivity_constraint( self, - binary_variables=[flow.submodel.on_off.on for flow in self.element.prevent_simultaneous_flows], + binary_variables=[flow.submodel.status.status for flow in self.element.prevent_simultaneous_flows], short_name='prevent_simultaneous_use', ) @@ -989,21 +990,21 @@ def results_structure(self): } @property - def previous_states(self) -> xr.DataArray | None: - """Previous state of the component, derived from its flows""" - if self.element.on_off_parameters is None: - raise ValueError(f'OnOffModel not present in \n{self}\nCant access previous_states') + def previous_status(self) -> xr.DataArray | None: + """Previous status of the component, derived from its flows""" + if self.element.status_parameters is None: + raise ValueError(f'StatusModel not present in \n{self}\nCant access previous_status') - previous_states = [flow.submodel.on_off._previous_states for flow in self.element.inputs + self.element.outputs] - 
previous_states = [da for da in previous_states if da is not None] + previous_status = [flow.submodel.status._previous_status for flow in self.element.inputs + self.element.outputs] + previous_status = [da for da in previous_status if da is not None] - if not previous_states: # Empty list + if not previous_status: # Empty list return None - max_len = max(da.sizes['time'] for da in previous_states) + max_len = max(da.sizes['time'] for da in previous_status) - padded_previous_states = [ + padded_previous_status = [ da.assign_coords(time=range(-da.sizes['time'], 0)).reindex(time=range(-max_len, 0), fill_value=0) - for da in previous_states + for da in previous_status ] - return xr.concat(padded_previous_states, dim='flow').any(dim='flow').astype(int) + return xr.concat(padded_previous_status, dim='flow').any(dim='flow').astype(int) diff --git a/flixopt/features.py b/flixopt/features.py index 8c4bf7c70..653a2fb92 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -16,15 +16,17 @@ if TYPE_CHECKING: from collections.abc import Collection + import xarray as xr + from .core import FlowSystemDimensions - from .interface import InvestParameters, OnOffParameters, Piecewise + from .interface import InvestParameters, Piecewise, StatusParameters from .types import Numeric_PS, Numeric_TPS class InvestmentModel(Submodel): """ This feature model is used to model the investment of a variable. - It applies the corresponding bounds to the variable and the on/off state of the variable. + It applies the corresponding bounds to the variable and the active/inactive state of the variable. 
Args: model: The optimization model instance @@ -75,7 +77,7 @@ def _create_variables_and_constraints(self): BoundingPatterns.bounds_with_state( self, variable=self.size, - variable_state=self._variables['invested'], + state=self._variables['invested'], bounds=(self.parameters.minimum_or_fixed_size, self.parameters.maximum_or_fixed_size), ) @@ -144,32 +146,33 @@ def invested(self) -> linopy.Variable | None: return self._variables['invested'] -class OnOffModel(Submodel): - """OnOff model using factory patterns""" +class StatusModel(Submodel): + """Status model for equipment with binary active/inactive states""" def __init__( self, model: FlowSystemModel, label_of_element: str, - parameters: OnOffParameters, - on_variable: linopy.Variable, - previous_states: Numeric_TPS | None, + parameters: StatusParameters, + status: linopy.Variable, + previous_status: xr.DataArray | None, label_of_model: str | None = None, ): """ - This feature model is used to model the on/off state of flow_rate(s). It does not matter of the flow_rates are - bounded by a size variable or by a hard bound. THe used bound here is the absolute highest/lowest bound! + This feature model is used to model the status (active/inactive) state of flow_rate(s). + It does not matter if the flow_rates are bounded by a size variable or by a hard bound. + The used bound here is the absolute highest/lowest bound! Args: model: The optimization model instance label_of_element: The label of the parent (Element). Used to construct the full label of the model. parameters: The parameters of the feature model. - on_variable: The variable that determines the on state - previous_states: The previous flow_rates + status: The variable that determines the active state + previous_status: The previous flow_rates label_of_model: The label of the model. This is needed to construct the full label of the model. 
""" - self.on = on_variable - self._previous_states = previous_states + self.status = status + self._previous_status = previous_status self.parameters = parameters super().__init__(model, label_of_element, label_of_model=label_of_model) @@ -177,92 +180,95 @@ def _do_modeling(self): """Create variables, constraints, and nested submodels""" super()._do_modeling() - if self.parameters.use_off: - off = self.add_variables(binary=True, short_name='off', coords=self._model.get_coords()) - self.add_constraints(self.on + off == 1, short_name='complementary') + # Create a separate binary 'inactive' variable when needed for downtime tracking or explicit use + # When not needed, the expression (1 - self.status) can be used instead + if self.parameters.use_downtime_tracking: + inactive = self.add_variables(binary=True, short_name='inactive', coords=self._model.get_coords()) + self.add_constraints(self.status + inactive == 1, short_name='complementary') # 3. Total duration tracking using existing pattern ModelingPrimitives.expression_tracking_variable( self, - tracked_expression=(self.on * self._model.hours_per_step).sum('time'), + tracked_expression=(self.status * self._model.hours_per_step).sum('time'), bounds=( - self.parameters.on_hours_min if self.parameters.on_hours_min is not None else 0, - self.parameters.on_hours_max if self.parameters.on_hours_max is not None else np.inf, - ), # TODO: self._model.hours_per_step.sum('time').item() + self._get_previous_on_duration()) - short_name='on_hours_total', + self.parameters.active_hours_min if self.parameters.active_hours_min is not None else 0, + self.parameters.active_hours_max + if self.parameters.active_hours_max is not None + else self._model.hours_per_step.sum('time').max().item(), + ), + short_name='active_hours', coords=['period', 'scenario'], ) # 4. 
Switch tracking using existing pattern - if self.parameters.use_switch_on: - self.add_variables(binary=True, short_name='switch|on', coords=self.get_coords()) - self.add_variables(binary=True, short_name='switch|off', coords=self.get_coords()) + if self.parameters.use_startup_tracking: + self.add_variables(binary=True, short_name='startup', coords=self.get_coords()) + self.add_variables(binary=True, short_name='shutdown', coords=self.get_coords()) BoundingPatterns.state_transition_bounds( self, - state_variable=self.on, - switch_on=self.switch_on, - switch_off=self.switch_off, + state=self.status, + activate=self.startup, + deactivate=self.shutdown, name=f'{self.label_of_model}|switch', - previous_state=self._previous_states.isel(time=-1) if self._previous_states is not None else 0, + previous_state=self._previous_status.isel(time=-1) if self._previous_status is not None else 0, coord='time', ) - if self.parameters.switch_on_max is not None: + if self.parameters.startup_limit is not None: count = self.add_variables( lower=0, - upper=self.parameters.switch_on_max, + upper=self.parameters.startup_limit, coords=self._model.get_coords(('period', 'scenario')), - short_name='switch|count', + short_name='startup_count', ) - self.add_constraints(count == self.switch_on.sum('time'), short_name='switch|count') + self.add_constraints(count == self.startup.sum('time'), short_name='startup_count') - # 5. Consecutive on duration using existing pattern - if self.parameters.use_consecutive_on_hours: + # 5. 
Consecutive active duration (uptime) using existing pattern + if self.parameters.use_uptime_tracking: ModelingPrimitives.consecutive_duration_tracking( self, - state_variable=self.on, - short_name='consecutive_on_hours', - minimum_duration=self.parameters.consecutive_on_hours_min, - maximum_duration=self.parameters.consecutive_on_hours_max, + state=self.status, + short_name='uptime', + minimum_duration=self.parameters.min_uptime, + maximum_duration=self.parameters.max_uptime, duration_per_step=self.hours_per_step, duration_dim='time', - previous_duration=self._get_previous_on_duration(), + previous_duration=self._get_previous_uptime(), ) - # 6. Consecutive off duration using existing pattern - if self.parameters.use_consecutive_off_hours: + # 6. Consecutive inactive duration (downtime) using existing pattern + if self.parameters.use_downtime_tracking: ModelingPrimitives.consecutive_duration_tracking( self, - state_variable=self.off, - short_name='consecutive_off_hours', - minimum_duration=self.parameters.consecutive_off_hours_min, - maximum_duration=self.parameters.consecutive_off_hours_max, + state=self.inactive, + short_name='downtime', + minimum_duration=self.parameters.min_downtime, + maximum_duration=self.parameters.max_downtime, duration_per_step=self.hours_per_step, duration_dim='time', - previous_duration=self._get_previous_off_duration(), + previous_duration=self._get_previous_downtime(), ) - # TODO: self._add_effects() def _add_effects(self): """Add operational effects""" - if self.parameters.effects_per_running_hour: + if self.parameters.effects_per_active_hour: self._model.effects.add_share_to_effects( name=self.label_of_element, expressions={ - effect: self.on * factor * self._model.hours_per_step - for effect, factor in self.parameters.effects_per_running_hour.items() + effect: self.status * factor * self._model.hours_per_step + for effect, factor in self.parameters.effects_per_active_hour.items() }, target='temporal', ) - if 
self.parameters.effects_per_switch_on: + if self.parameters.effects_per_startup: self._model.effects.add_share_to_effects( name=self.label_of_element, expressions={ - effect: self.switch_on * factor for effect, factor in self.parameters.effects_per_switch_on.items() + effect: self.startup * factor for effect, factor in self.parameters.effects_per_startup.items() }, target='temporal', ) @@ -270,55 +276,66 @@ def _add_effects(self): # Properties access variables from Submodel's tracking system @property - def on_hours_total(self) -> linopy.Variable: - """Total on hours variable""" - return self['on_hours_total'] + def active_hours(self) -> linopy.Variable: + """Total active hours variable""" + return self['active_hours'] @property - def off(self) -> linopy.Variable | None: - """Binary off state variable""" - return self.get('off') + def inactive(self) -> linopy.Variable | None: + """Binary inactive state variable. + + Note: + Only created when downtime tracking is enabled (min_downtime or max_downtime set). + For general use, prefer the expression `1 - status` instead of this variable. 
+ """ + return self.get('inactive') @property - def switch_on(self) -> linopy.Variable | None: - """Switch on variable""" - return self.get('switch|on') + def startup(self) -> linopy.Variable | None: + """Startup variable""" + return self.get('startup') @property - def switch_off(self) -> linopy.Variable | None: - """Switch off variable""" - return self.get('switch|off') + def shutdown(self) -> linopy.Variable | None: + """Shutdown variable""" + return self.get('shutdown') @property - def switch_on_nr(self) -> linopy.Variable | None: - """Number of switch-ons variable""" - return self.get('switch|count') + def startup_count(self) -> linopy.Variable | None: + """Number of startups variable""" + return self.get('startup_count') @property - def consecutive_on_hours(self) -> linopy.Variable | None: - """Consecutive on hours variable""" - return self.get('consecutive_on_hours') + def uptime(self) -> linopy.Variable | None: + """Consecutive active hours (uptime) variable""" + return self.get('uptime') @property - def consecutive_off_hours(self) -> linopy.Variable | None: - """Consecutive off hours variable""" - return self.get('consecutive_off_hours') + def downtime(self) -> linopy.Variable | None: + """Consecutive inactive hours (downtime) variable""" + return self.get('downtime') - def _get_previous_on_duration(self): - """Get previous on duration. Previously OFF by default, for one timestep""" + def _get_previous_uptime(self): + """Get previous uptime (consecutive active hours). + + Returns 0 if no previous status is provided (assumes previously inactive). 
+ """ hours_per_step = self._model.hours_per_step.isel(time=0).min().item() - if self._previous_states is None: + if self._previous_status is None: return 0 else: - return ModelingUtilities.compute_consecutive_hours_in_state(self._previous_states, hours_per_step) + return ModelingUtilities.compute_consecutive_hours_in_state(self._previous_status, hours_per_step) - def _get_previous_off_duration(self): - """Get previous off duration. Previously OFF by default, for one timestep""" + def _get_previous_downtime(self): + """Get previous downtime (consecutive inactive hours). + + Returns one timestep duration if no previous status is provided (assumes previously inactive). + """ hours_per_step = self._model.hours_per_step.isel(time=0).min().item() - if self._previous_states is None: + if self._previous_status is None: return hours_per_step else: - return ModelingUtilities.compute_consecutive_hours_in_state(self._previous_states * -1 + 1, hours_per_step) + return ModelingUtilities.compute_consecutive_hours_in_state(self._previous_status * -1 + 1, hours_per_step) class PieceModel(Submodel): diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 52c403396..c0deaa1ca 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -52,7 +52,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): hours_of_last_timestep: Duration of the last timestep. If None, computed from the last time interval. hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the first time interval. Can be a scalar (all previous timesteps have same duration) or array (different durations). - Used to calculate previous values (e.g., consecutive_on_hours). + Used to calculate previous values (e.g., uptime and downtime). weight_of_last_period: Weight/duration of the last period. If None, computed from the last period interval. Used for calculating sums over periods in multi-period models. scenario_weights: The weights of each scenario. 
If None, all scenarios have the same weight (normalized to 1). @@ -76,7 +76,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): >>> flow_system = fx.FlowSystem(timesteps) >>> >>> # Add elements to the system - >>> boiler = fx.Component('Boiler', inputs=[heat_flow], on_off_parameters=...) + >>> boiler = fx.Component('Boiler', inputs=[heat_flow], status_parameters=...) >>> heat_bus = fx.Bus('Heat', excess_penalty_per_flow_hour=1e4) >>> costs = fx.Effect('costs', is_objective=True, is_standard=True) >>> flow_system.add_elements(boiler, heat_bus, costs) diff --git a/flixopt/interface.py b/flixopt/interface.py index cfa210f6d..30db4876f 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -1,5 +1,5 @@ """ -This module contains classes to collect Parameters for the Investment and OnOff decisions. +This module contains classes to collect Parameters for the Investment and Status decisions. These are tightly connected to features.py """ @@ -413,7 +413,7 @@ class PiecewiseConversion(Interface): operate in certain ranges (e.g., minimum loads, unstable regions). **Discrete Modes**: Use pieces with identical start/end values to model - equipment with fixed operating points (e.g., on/off, discrete speeds). + equipment with fixed operating points (e.g., on/inactive, discrete speeds). **Efficiency Changes**: Coordinate input and output pieces to reflect changing conversion efficiency across operating ranges. @@ -1006,19 +1006,19 @@ def compute_linked_periods(first_period: int, last_period: int, periods: pd.Inde @register_class_for_io -class OnOffParameters(Interface): - """Define operational constraints and effects for binary on/off equipment behavior. +class StatusParameters(Interface): + """Define operational constraints and effects for binary status equipment behavior. 
- This class models equipment that operates in discrete states (on/off) rather than + This class models equipment that operates in discrete states (active/inactive) rather than continuous operation, capturing realistic operational constraints and associated costs. It handles complex equipment behavior including startup costs, minimum run times, cycling limitations, and maintenance scheduling requirements. Key Modeling Capabilities: - **Switching Costs**: One-time costs for starting equipment (fuel, wear, labor) - **Runtime Constraints**: Minimum and maximum continuous operation periods - **Cycling Limits**: Maximum number of starts to prevent excessive wear - **Operating Hours**: Total runtime limits and requirements over time horizon + **Startup Costs**: One-time costs for starting equipment (fuel, wear, labor) + **Runtime Constraints**: Minimum and maximum continuous operation periods (uptime/downtime) + **Cycling Limits**: Maximum number of startups to prevent excessive wear + **Operating Hours**: Total active hours limits and requirements over time horizon Typical Equipment Applications: - **Power Plants**: Combined cycle units, steam turbines with startup costs @@ -1029,45 +1029,45 @@ class OnOffParameters(Interface): Mathematical Formulation: See the complete mathematical model in the documentation: - [OnOffParameters](../user-guide/mathematical-notation/features/OnOffParameters.md) + [StatusParameters](../user-guide/mathematical-notation/features/StatusParameters.md) Args: - effects_per_switch_on: Costs or impacts incurred for each transition from - off state (var_on=0) to on state (var_on=1). Represents startup costs, + effects_per_startup: Costs or impacts incurred for each transition from + inactive state (status=0) to active state (status=1). Represents startup costs, wear and tear, or other switching impacts. Dictionary mapping effect names to values (e.g., {'cost': 500, 'maintenance_hours': 2}). 
- effects_per_running_hour: Ongoing costs or impacts while equipment operates - in the on state. Includes fuel costs, labor, consumables, or emissions. + effects_per_active_hour: Ongoing costs or impacts while equipment operates + in the active state. Includes fuel costs, labor, consumables, or emissions. Dictionary mapping effect names to hourly values (e.g., {'fuel_cost': 45}). - on_hours_min: Minimum total operating hours per period. + active_hours_min: Minimum total active hours across the entire time horizon per period. Ensures equipment meets minimum utilization requirements or contractual obligations (e.g., power purchase agreements, maintenance schedules). - on_hours_max: Maximum total operating hours per period. + active_hours_max: Maximum total active hours across the entire time horizon per period. Limits equipment usage due to maintenance schedules, fuel availability, environmental permits, or equipment lifetime constraints. - consecutive_on_hours_min: Minimum continuous operating duration once started. + min_uptime: Minimum continuous operating duration once started (unit commitment term). Models minimum run times due to thermal constraints, process stability, or efficiency considerations. Can be time-varying to reflect different constraints across the planning horizon. - consecutive_on_hours_max: Maximum continuous operating duration in one campaign. + max_uptime: Maximum continuous operating duration in one campaign (unit commitment term). Models mandatory maintenance intervals, process batch sizes, or equipment thermal limits requiring periodic shutdowns. - consecutive_off_hours_min: Minimum continuous shutdown duration between operations. + min_downtime: Minimum continuous shutdown duration between operations (unit commitment term). Models cooling periods, maintenance requirements, or process constraints that prevent immediate restart after shutdown. 
- consecutive_off_hours_max: Maximum continuous shutdown duration before mandatory + max_downtime: Maximum continuous shutdown duration before mandatory restart. Models equipment preservation, process stability, or contractual requirements for minimum activity levels. - switch_on_max: Maximum number of startup operations per period. + startup_limit: Maximum number of startup operations across the time horizon per period.. Limits equipment cycling to reduce wear, maintenance costs, or comply with operational constraints (e.g., grid stability requirements). - force_switch_on: When True, creates switch-on variables even without explicit - switch_on_max constraint. Useful for tracking or reporting startup + force_startup_tracking: When True, creates startup variables even without explicit + startup_limit constraint. Useful for tracking or reporting startup events without enforcing limits. Note: **Time Series Boundary Handling**: The final time period constraints for - consecutive_on_hours_min/max and consecutive_off_hours_min/max are not + min_uptime/max_uptime and min_downtime/max_downtime are not enforced, allowing the optimization to end with ongoing campaigns that may be shorter than the specified minimums or longer than maximums. 
@@ -1075,105 +1075,105 @@ class OnOffParameters(Interface): Combined cycle power plant with startup costs and minimum run time: ```python - power_plant_operation = OnOffParameters( - effects_per_switch_on={ + power_plant_operation = StatusParameters( + effects_per_startup={ 'startup_cost': 25000, # €25,000 per startup 'startup_fuel': 150, # GJ natural gas for startup 'startup_time': 4, # Hours to reach full output 'maintenance_impact': 0.1, # Fractional life consumption }, - effects_per_running_hour={ - 'fixed_om': 125, # Fixed O&M costs while running + effects_per_active_hour={ + 'fixed_om': 125, # Fixed O&M costs while active 'auxiliary_power': 2.5, # MW parasitic loads }, - consecutive_on_hours_min=8, # Minimum 8-hour run once started - consecutive_off_hours_min=4, # Minimum 4-hour cooling period - on_hours_max=6000, # Annual operating limit + min_uptime=8, # Minimum 8-hour run once started + min_downtime=4, # Minimum 4-hour cooling period + active_hours_max=6000, # Annual operating limit ) ``` Industrial batch process with cycling limits: ```python - batch_reactor = OnOffParameters( - effects_per_switch_on={ + batch_reactor = StatusParameters( + effects_per_startup={ 'setup_cost': 1500, # Labor and materials for startup 'catalyst_consumption': 5, # kg catalyst per batch 'cleaning_chemicals': 200, # L cleaning solution }, - effects_per_running_hour={ + effects_per_active_hour={ 'steam': 2.5, # t/h process steam 'electricity': 150, # kWh electrical load 'cooling_water': 50, # m³/h cooling water }, - consecutive_on_hours_min=12, # Minimum batch size (12 hours) - consecutive_on_hours_max=24, # Maximum batch size (24 hours) - consecutive_off_hours_min=6, # Cleaning and setup time - switch_on_max=200, # Maximum 200 batches per period - on_hours_max=4000, # Maximum production time + min_uptime=12, # Minimum batch size (12 hours) + max_uptime=24, # Maximum batch size (24 hours) + min_downtime=6, # Cleaning and setup time + startup_limit=200, # Maximum 200 batches per 
period + active_hours_max=4000, # Maximum production time ) ``` HVAC system with thermostat control and maintenance: ```python - hvac_operation = OnOffParameters( - effects_per_switch_on={ + hvac_operation = StatusParameters( + effects_per_startup={ 'compressor_wear': 0.5, # Hours of compressor life per start 'inrush_current': 15, # kW peak demand on startup }, - effects_per_running_hour={ + effects_per_active_hour={ 'electricity': 25, # kW electrical consumption 'maintenance': 0.12, # €/hour maintenance reserve }, - consecutive_on_hours_min=1, # Minimum 1-hour run to avoid cycling - consecutive_off_hours_min=0.5, # 30-minute minimum off time - switch_on_max=2000, # Limit cycling for compressor life - on_hours_min=2000, # Minimum operation for humidity control - on_hours_max=5000, # Maximum operation for energy budget + min_uptime=1, # Minimum 1-hour run to avoid cycling + min_downtime=0.5, # 30-minute minimum inactive time + startup_limit=2000, # Limit cycling for compressor life + active_hours_min=2000, # Minimum operation for humidity control + active_hours_max=5000, # Maximum operation for energy budget ) ``` Backup generator with testing and maintenance requirements: ```python - backup_generator = OnOffParameters( - effects_per_switch_on={ + backup_generator = StatusParameters( + effects_per_startup={ 'fuel_priming': 50, # L diesel for system priming 'wear_factor': 1.0, # Start cycles impact on maintenance 'testing_labor': 2, # Hours technician time per test }, - effects_per_running_hour={ + effects_per_active_hour={ 'fuel_consumption': 180, # L/h diesel consumption 'emissions_permit': 15, # € emissions allowance cost 'noise_penalty': 25, # € noise compliance cost }, - consecutive_on_hours_min=0.5, # Minimum test duration (30 min) - consecutive_off_hours_max=720, # Maximum 30 days between tests - switch_on_max=52, # Weekly testing limit - on_hours_min=26, # Minimum annual testing (0.5h × 52) - on_hours_max=200, # Maximum runtime (emergencies + tests) + 
min_uptime=0.5, # Minimum test duration (30 min) + max_downtime=720, # Maximum 30 days between tests + startup_limit=52, # Weekly testing limit + active_hours_min=26, # Minimum annual testing (0.5h × 52) + active_hours_max=200, # Maximum runtime (emergencies + tests) ) ``` Peak shaving battery with cycling degradation: ```python - battery_cycling = OnOffParameters( - effects_per_switch_on={ + battery_cycling = StatusParameters( + effects_per_startup={ 'cycle_degradation': 0.01, # % capacity loss per cycle 'inverter_startup': 0.5, # kWh losses during startup }, - effects_per_running_hour={ + effects_per_active_hour={ 'standby_losses': 2, # kW standby consumption 'cooling': 5, # kW thermal management 'inverter_losses': 8, # kW conversion losses }, - consecutive_on_hours_min=1, # Minimum discharge duration - consecutive_on_hours_max=4, # Maximum continuous discharge - consecutive_off_hours_min=1, # Minimum rest between cycles - switch_on_max=365, # Daily cycling limit - force_switch_on=True, # Track all cycling events + min_uptime=1, # Minimum discharge duration + max_uptime=4, # Maximum continuous discharge + min_downtime=1, # Minimum rest between cycles + startup_limit=365, # Daily cycling limit + force_startup_tracking=True, # Track all cycling events ) ``` @@ -1189,86 +1189,73 @@ class OnOffParameters(Interface): def __init__( self, - effects_per_switch_on: Effect_TPS | Numeric_TPS | None = None, - effects_per_running_hour: Effect_TPS | Numeric_TPS | None = None, - on_hours_min: Numeric_PS | None = None, - on_hours_max: Numeric_PS | None = None, - consecutive_on_hours_min: Numeric_TPS | None = None, - consecutive_on_hours_max: Numeric_TPS | None = None, - consecutive_off_hours_min: Numeric_TPS | None = None, - consecutive_off_hours_max: Numeric_TPS | None = None, - switch_on_max: Numeric_PS | None = None, - force_switch_on: bool = False, + effects_per_startup: Effect_TPS | Numeric_TPS | None = None, + effects_per_active_hour: Effect_TPS | Numeric_TPS | None = 
None, + active_hours_min: Numeric_PS | None = None, + active_hours_max: Numeric_PS | None = None, + min_uptime: Numeric_TPS | None = None, + max_uptime: Numeric_TPS | None = None, + min_downtime: Numeric_TPS | None = None, + max_downtime: Numeric_TPS | None = None, + startup_limit: Numeric_PS | None = None, + force_startup_tracking: bool = False, ): - self.effects_per_switch_on = effects_per_switch_on if effects_per_switch_on is not None else {} - self.effects_per_running_hour = effects_per_running_hour if effects_per_running_hour is not None else {} - self.on_hours_min = on_hours_min - self.on_hours_max = on_hours_max - self.consecutive_on_hours_min = consecutive_on_hours_min - self.consecutive_on_hours_max = consecutive_on_hours_max - self.consecutive_off_hours_min = consecutive_off_hours_min - self.consecutive_off_hours_max = consecutive_off_hours_max - self.switch_on_max = switch_on_max - self.force_switch_on: bool = force_switch_on + self.effects_per_startup = effects_per_startup if effects_per_startup is not None else {} + self.effects_per_active_hour = effects_per_active_hour if effects_per_active_hour is not None else {} + self.active_hours_min = active_hours_min + self.active_hours_max = active_hours_max + self.min_uptime = min_uptime + self.max_uptime = max_uptime + self.min_downtime = min_downtime + self.max_downtime = max_downtime + self.startup_limit = startup_limit + self.force_startup_tracking: bool = force_startup_tracking def transform_data(self, name_prefix: str = '') -> None: - self.effects_per_switch_on = self._fit_effect_coords( + self.effects_per_startup = self._fit_effect_coords( prefix=name_prefix, - effect_values=self.effects_per_switch_on, - suffix='per_switch_on', + effect_values=self.effects_per_startup, + suffix='per_startup', ) - self.effects_per_running_hour = self._fit_effect_coords( + self.effects_per_active_hour = self._fit_effect_coords( prefix=name_prefix, - effect_values=self.effects_per_running_hour, - 
suffix='per_running_hour', + effect_values=self.effects_per_active_hour, + suffix='per_active_hour', ) - self.consecutive_on_hours_min = self._fit_coords( - f'{name_prefix}|consecutive_on_hours_min', self.consecutive_on_hours_min + self.min_uptime = self._fit_coords(f'{name_prefix}|min_uptime', self.min_uptime) + self.max_uptime = self._fit_coords(f'{name_prefix}|max_uptime', self.max_uptime) + self.min_downtime = self._fit_coords(f'{name_prefix}|min_downtime', self.min_downtime) + self.max_downtime = self._fit_coords(f'{name_prefix}|max_downtime', self.max_downtime) + self.active_hours_max = self._fit_coords( + f'{name_prefix}|active_hours_max', self.active_hours_max, dims=['period', 'scenario'] ) - self.consecutive_on_hours_max = self._fit_coords( - f'{name_prefix}|consecutive_on_hours_max', self.consecutive_on_hours_max + self.active_hours_min = self._fit_coords( + f'{name_prefix}|active_hours_min', self.active_hours_min, dims=['period', 'scenario'] ) - self.consecutive_off_hours_min = self._fit_coords( - f'{name_prefix}|consecutive_off_hours_min', self.consecutive_off_hours_min + self.startup_limit = self._fit_coords( + f'{name_prefix}|startup_limit', self.startup_limit, dims=['period', 'scenario'] ) - self.consecutive_off_hours_max = self._fit_coords( - f'{name_prefix}|consecutive_off_hours_max', self.consecutive_off_hours_max - ) - self.on_hours_max = self._fit_coords( - f'{name_prefix}|on_hours_max', self.on_hours_max, dims=['period', 'scenario'] - ) - self.on_hours_min = self._fit_coords( - f'{name_prefix}|on_hours_min', self.on_hours_min, dims=['period', 'scenario'] - ) - self.switch_on_max = self._fit_coords( - f'{name_prefix}|switch_on_max', self.switch_on_max, dims=['period', 'scenario'] - ) - - @property - def use_off(self) -> bool: - """Proxy: whether OFF variable is required""" - return self.use_consecutive_off_hours @property - def use_consecutive_on_hours(self) -> bool: - """Determines whether a Variable for consecutive on hours is needed or not""" 
- return any(param is not None for param in [self.consecutive_on_hours_min, self.consecutive_on_hours_max]) + def use_uptime_tracking(self) -> bool: + """Determines whether a Variable for uptime (consecutive active hours) is needed or not""" + return any(param is not None for param in [self.min_uptime, self.max_uptime]) @property - def use_consecutive_off_hours(self) -> bool: - """Determines whether a Variable for consecutive off hours is needed or not""" - return any(param is not None for param in [self.consecutive_off_hours_min, self.consecutive_off_hours_max]) + def use_downtime_tracking(self) -> bool: + """Determines whether a Variable for downtime (consecutive inactive hours) is needed or not""" + return any(param is not None for param in [self.min_downtime, self.max_downtime]) @property - def use_switch_on(self) -> bool: - """Determines whether a variable for switch_on is needed or not""" - if self.force_switch_on: + def use_startup_tracking(self) -> bool: + """Determines whether a variable for startup is needed or not""" + if self.force_startup_tracking: return True return any( self._has_value(param) for param in [ - self.effects_per_switch_on, - self.switch_on_max, + self.effects_per_startup, + self.startup_limit, ] ) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 9ca73519e..8326fe6c5 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -14,7 +14,7 @@ if TYPE_CHECKING: from .elements import Flow - from .interface import OnOffParameters + from .interface import StatusParameters from .types import Numeric_TPS logger = logging.getLogger('flixopt') @@ -35,7 +35,7 @@ class Boiler(LinearConverter): output to fuel input energy content. fuel_flow: Fuel input-flow representing fuel consumption. thermal_flow: Thermal output-flow representing heat generation. - on_off_parameters: Parameters defining binary operation constraints and costs. 
+ status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. @@ -59,9 +59,9 @@ class Boiler(LinearConverter): thermal_efficiency=seasonal_efficiency_profile, # Time-varying efficiency fuel_flow=biomass_flow, thermal_flow=district_heat_flow, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=4, # Minimum 4-hour operation - effects_per_switch_on={'startup_fuel': 50}, # Startup fuel penalty + status_parameters=StatusParameters( + min_uptime=4, # Minimum 4-hour operation + effects_per_startup={'startup_fuel': 50}, # Startup fuel penalty ), ) ``` @@ -79,7 +79,7 @@ def __init__( thermal_efficiency: Numeric_TPS | None = None, fuel_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -94,7 +94,7 @@ def __init__( label, inputs=[fuel_flow], outputs=[thermal_flow], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) self.fuel_flow = fuel_flow @@ -128,7 +128,7 @@ class Power2Heat(LinearConverter): electrode boilers or systems with distribution losses. electrical_flow: Electrical input-flow representing electricity consumption. thermal_flow: Thermal output-flow representing heat generation. - on_off_parameters: Parameters defining binary operation constraints and costs. + status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -152,9 +152,9 @@ class Power2Heat(LinearConverter): thermal_efficiency=0.95, # 95% efficiency including boiler losses electrical_flow=industrial_electricity, thermal_flow=process_steam_flow, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=1, # Minimum 1-hour operation - effects_per_switch_on={'startup_cost': 100}, + status_parameters=StatusParameters( + min_uptime=1, # Minimum 1-hour operation + effects_per_startup={'startup_cost': 100}, ), ) ``` @@ -174,7 +174,7 @@ def __init__( thermal_efficiency: Numeric_TPS | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -189,7 +189,7 @@ def __init__( label, inputs=[electrical_flow], outputs=[thermal_flow], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) @@ -224,7 +224,7 @@ class HeatPump(LinearConverter): additional energy from the environment. electrical_flow: Electrical input-flow representing electricity consumption. thermal_flow: Thermal output-flow representing heat generation. - on_off_parameters: Parameters defining binary operation constraints and costs. + status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -248,9 +248,9 @@ class HeatPump(LinearConverter): cop=temperature_dependent_cop, # Time-varying COP based on ground temp electrical_flow=electricity_flow, thermal_flow=radiant_heating_flow, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=2, # Avoid frequent cycling - effects_per_running_hour={'maintenance': 0.5}, + status_parameters=StatusParameters( + min_uptime=2, # Avoid frequent cycling + effects_per_active_hour={'maintenance': 0.5}, ), ) ``` @@ -269,7 +269,7 @@ def __init__( cop: Numeric_TPS | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -285,7 +285,7 @@ def __init__( inputs=[electrical_flow], outputs=[thermal_flow], conversion_factors=[], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) self.electrical_flow = electrical_flow @@ -319,7 +319,7 @@ class CoolingTower(LinearConverter): of thermal power that must be supplied as electricity for fans and pumps. electrical_flow: Electrical input-flow representing electricity consumption for fans/pumps. thermal_flow: Thermal input-flow representing waste heat to be rejected to environment. - on_off_parameters: Parameters defining binary operation constraints and costs. + status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -343,9 +343,9 @@ class CoolingTower(LinearConverter): specific_electricity_demand=0.015, # 1.5% auxiliary power electrical_flow=auxiliary_electricity, thermal_flow=condenser_waste_heat, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=4, # Minimum operation time - effects_per_running_hour={'water_consumption': 2.5}, # m³/h + status_parameters=StatusParameters( + min_uptime=4, # Minimum operation time + effects_per_active_hour={'water_consumption': 2.5}, # m³/h ), ) ``` @@ -366,7 +366,7 @@ def __init__( specific_electricity_demand: Numeric_TPS, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -379,7 +379,7 @@ def __init__( label, inputs=[electrical_flow, thermal_flow], outputs=[], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) @@ -416,7 +416,7 @@ class CHP(LinearConverter): fuel_flow: Fuel input-flow representing fuel consumption. electrical_flow: Electrical output-flow representing electricity generation. thermal_flow: Thermal output-flow representing heat generation. - on_off_parameters: Parameters defining binary operation constraints and costs. + status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -444,10 +444,10 @@ class CHP(LinearConverter): fuel_flow=fuel_gas_flow, electrical_flow=plant_electricity, thermal_flow=process_steam, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=8, # Minimum 8-hour operation - effects_per_switch_on={'startup_cost': 5000}, - on_hours_max=6000, # Annual operating limit + status_parameters=StatusParameters( + min_uptime=8, # Minimum 8-hour operation + effects_per_startup={'startup_cost': 5000}, + active_hours_max=6000, # Annual operating limit ), ) ``` @@ -470,7 +470,7 @@ def __init__( fuel_flow: Flow | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -490,7 +490,7 @@ def __init__( inputs=[fuel_flow], outputs=[thermal_flow, electrical_flow], conversion_factors=[{}, {}], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) @@ -546,7 +546,7 @@ class HeatPumpWithSource(LinearConverter): heat_source_flow: Heat source input-flow representing thermal energy extracted from environment (ground, air, water source). thermal_flow: Thermal output-flow representing useful heat delivered to the application. - on_off_parameters: Parameters defining binary operation constraints and costs. + status_parameters: Parameters defining status, startup and shutdown constraints and effects meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -572,9 +572,9 @@ class HeatPumpWithSource(LinearConverter): electrical_flow=electricity_consumption, heat_source_flow=industrial_heat_extraction, # Heat extracted from a industrial process or waste water thermal_flow=heat_supply, - on_off_parameters=OnOffParameters( - consecutive_on_hours_min=0.5, # 30-minute minimum runtime - effects_per_switch_on={'costs': 1000}, + status_parameters=StatusParameters( + min_uptime=0.5, # 30-minute minimum runtime + effects_per_startup={'costs': 1000}, ), ) ``` @@ -600,7 +600,7 @@ def __init__( electrical_flow: Flow | None = None, heat_source_flow: Flow | None = None, thermal_flow: Flow | None = None, - on_off_parameters: OnOffParameters | None = None, + status_parameters: StatusParameters | None = None, meta_data: dict | None = None, ): # Validate required parameters @@ -617,7 +617,7 @@ def __init__( label, inputs=[electrical_flow, heat_source_flow], outputs=[thermal_flow], - on_off_parameters=on_off_parameters, + status_parameters=status_parameters, meta_data=meta_data, ) self.electrical_flow = electrical_flow diff --git a/flixopt/modeling.py b/flixopt/modeling.py index 01a2c2410..6b81a0a4a 100644 --- a/flixopt/modeling.py +++ b/flixopt/modeling.py @@ -59,16 +59,16 @@ def count_consecutive_states( """Count consecutive steps in the final active state of a binary time series. This function counts how many consecutive time steps the series remains "on" - (non-zero) at the end of the time series. If the final state is "off", returns 0. + (non-zero) at the end of the time series. If the final state is "inactive", returns 0. Args: - binary_values: Binary DataArray with values close to 0 (off) or 1 (on). + binary_values: Binary DataArray with values close to 0 (inactive) or 1 (active). dim: Dimension along which to count consecutive states. epsilon: Tolerance for zero detection. Uses CONFIG.Modeling.epsilon if None. Returns: - Sum of values in the final consecutive "on" period. Returns 0.0 if the - final state is "off". 
+ Sum of values in the final consecutive "active" period. Returns 0.0 if the + final state is "inactive". Examples: >>> arr = xr.DataArray([0, 0, 1, 1, 1, 0, 1, 1], dims=['time']) @@ -100,11 +100,11 @@ def count_consecutive_states( if arr.size == 1: return float(arr[0]) if not np.isclose(arr[0], 0, atol=epsilon) else 0.0 - # Return 0 if final state is off + # Return 0 if final state is inactive if np.isclose(arr[-1], 0, atol=epsilon): return 0.0 - # Find the last zero position (treat NaNs as off) + # Find the last zero position (treat NaNs as inactive) arr = np.nan_to_num(arr, nan=0.0) is_zero = np.isclose(arr, 0, atol=epsilon) zero_indices = np.where(is_zero)[0] @@ -123,7 +123,7 @@ def compute_consecutive_hours_in_state( epsilon: float = None, ) -> float: """ - Computes the final consecutive duration in state 'on' (=1) in hours. + Computes the final consecutive duration in state 'active' (=1) in hours. Args: binary_values: Binary DataArray with 'time' dim, or scalar/array @@ -131,7 +131,7 @@ def compute_consecutive_hours_in_state( epsilon: Tolerance for zero detection (uses CONFIG.Modeling.epsilon if None) Returns: - The duration of the final consecutive 'on' period in hours + The duration of the final consecutive 'active' period in hours """ if not isinstance(hours_per_timestep, (int, float)): raise TypeError(f'hours_per_timestep must be a scalar, got {type(hours_per_timestep)}') @@ -159,14 +159,14 @@ def compute_previous_off_duration( previous_values: xr.DataArray, hours_per_step: xr.DataArray | float | int ) -> float: """ - Compute previous consecutive 'off' duration. + Compute previous consecutive 'inactive' duration. 
Args: previous_values: DataArray with 'time' dimension hours_per_step: Duration of each timestep in hours Returns: - Previous consecutive off duration in hours + Previous consecutive inactive duration in hours """ if previous_values is None or previous_values.size == 0: return 0.0 @@ -199,22 +199,28 @@ class ModelingPrimitives: @staticmethod def expression_tracking_variable( model: Submodel, - tracked_expression, + tracked_expression: linopy.expressions.LinearExpression | linopy.Variable, name: str = None, short_name: str = None, bounds: tuple[xr.DataArray, xr.DataArray] = None, coords: str | list[str] | None = None, ) -> tuple[linopy.Variable, linopy.Constraint]: - """ - Creates variable that equals a given expression. + """Creates a variable constrained to equal a given expression. Mathematical formulation: tracker = expression - lower ≤ tracker ≤ upper (if bounds provided) + lower ≤ tracker ≤ upper (if bounds provided) + + Args: + model: The submodel to add variables and constraints to + tracked_expression: Expression that the tracker variable must equal + name: Full name for the variable and constraint + short_name: Short name for display purposes + bounds: Optional (lower_bound, upper_bound) tuple for the tracker variable + coords: Coordinate dimensions for the variable (None uses all model coords) Returns: - variables: {'tracker': tracker_var} - constraints: {'tracking': constraint} + Tuple of (tracker_variable, tracking_constraint) """ if not isinstance(model, Submodel): raise ValueError('ModelingPrimitives.expression_tracking_variable() can only be used with a Submodel') @@ -238,7 +244,7 @@ def expression_tracking_variable( @staticmethod def consecutive_duration_tracking( model: Submodel, - state_variable: linopy.Variable, + state: linopy.Variable, name: str = None, short_name: str = None, minimum_duration: xr.DataArray | None = None, @@ -246,29 +252,37 @@ def consecutive_duration_tracking( duration_dim: str = 'time', duration_per_step: int | float | 
xr.DataArray = None, previous_duration: xr.DataArray = 0, - ) -> tuple[linopy.Variable, tuple[linopy.Constraint, linopy.Constraint, linopy.Constraint]]: - """ - Creates consecutive duration tracking for a binary state variable. + ) -> tuple[dict[str, linopy.Variable], dict[str, linopy.Constraint]]: + """Creates consecutive duration tracking for a binary state variable. + + Tracks how long a binary state has been continuously active (=1). + Duration resets to 0 when state becomes inactive (=0). Mathematical formulation: - duration[t] ≤ state[t] * M ∀t + duration[t] ≤ state[t] · M ∀t duration[t+1] ≤ duration[t] + duration_per_step[t] ∀t - duration[t+1] ≥ duration[t] + duration_per_step[t] + (state[t+1] - 1) * M ∀t - duration[0] = (duration_per_step[0] + previous_duration) * state[0] + duration[t+1] ≥ duration[t] + duration_per_step[t] + (state[t+1] - 1) · M ∀t + duration[0] = (duration_per_step[0] + previous_duration) · state[0] If minimum_duration provided: - duration[t] ≥ (state[t-1] - state[t]) * minimum_duration[t-1] ∀t > 0 + duration[t] ≥ (state[t-1] - state[t]) · minimum_duration[t-1] ∀t > 0 + + Where M is a big-M value (sum of all duration_per_step + previous_duration). 
Args: - name: Name of the duration variable - state_variable: Binary state variable to track duration for - minimum_duration: Optional minimum consecutive duration - maximum_duration: Optional maximum consecutive duration - previous_duration: Duration from before first timestep + model: The submodel to add variables and constraints to + state: Binary state variable (1=active, 0=inactive) to track duration for + name: Full name for the duration variable + short_name: Short name for display purposes + minimum_duration: Optional minimum consecutive duration (enforced at state transitions) + maximum_duration: Optional maximum consecutive duration (upper bound on duration variable) + duration_dim: Dimension name to track duration along (default 'time') + duration_per_step: Time increment per step in duration_dim + previous_duration: Initial duration value before first timestep (default 0) Returns: - variables: {'duration': duration_var} - constraints: {'ub': constraint, 'forward': constraint, 'backward': constraint, ...} + Tuple of (duration_variable, constraints_dict) + where constraints_dict contains: 'ub', 'forward', 'backward', 'initial', and optionally 'lb', 'initial_lb' """ if not isinstance(model, Submodel): raise ValueError('ModelingPrimitives.consecutive_duration_tracking() can only be used with a Submodel') @@ -279,7 +293,7 @@ def consecutive_duration_tracking( duration = model.add_variables( lower=0, upper=maximum_duration if maximum_duration is not None else mega, - coords=state_variable.coords, + coords=state.coords, name=name, short_name=short_name, ) @@ -287,7 +301,7 @@ def consecutive_duration_tracking( constraints = {} # Upper bound: duration[t] ≤ state[t] * M - constraints['ub'] = model.add_constraints(duration <= state_variable * mega, name=f'{duration.name}|ub') + constraints['ub'] = model.add_constraints(duration <= state * mega, name=f'{duration.name}|ub') # Forward constraint: duration[t+1] ≤ duration[t] + duration_per_step[t] 
constraints['forward'] = model.add_constraints( @@ -301,14 +315,14 @@ def consecutive_duration_tracking( duration.isel({duration_dim: slice(1, None)}) >= duration.isel({duration_dim: slice(None, -1)}) + duration_per_step.isel({duration_dim: slice(None, -1)}) - + (state_variable.isel({duration_dim: slice(1, None)}) - 1) * mega, + + (state.isel({duration_dim: slice(1, None)}) - 1) * mega, name=f'{duration.name}|backward', ) # Initial condition: duration[0] = (duration_per_step[0] + previous_duration) * state[0] constraints['initial'] = model.add_constraints( duration.isel({duration_dim: 0}) - == (duration_per_step.isel({duration_dim: 0}) + previous_duration) * state_variable.isel({duration_dim: 0}), + == (duration_per_step.isel({duration_dim: 0}) + previous_duration) * state.isel({duration_dim: 0}), name=f'{duration.name}|initial', ) @@ -316,10 +330,7 @@ def consecutive_duration_tracking( if minimum_duration is not None: constraints['lb'] = model.add_constraints( duration - >= ( - state_variable.isel({duration_dim: slice(None, -1)}) - - state_variable.isel({duration_dim: slice(1, None)}) - ) + >= (state.isel({duration_dim: slice(None, -1)}) - state.isel({duration_dim: slice(1, None)})) * minimum_duration.isel({duration_dim: slice(None, -1)}), name=f'{duration.name}|lb', ) @@ -333,7 +344,7 @@ def consecutive_duration_tracking( min0 = float(minimum_duration.isel({duration_dim: 0}).max().item()) if prev > 0 and prev < min0: constraints['initial_lb'] = model.add_constraints( - state_variable.isel({duration_dim: 0}) == 1, name=f'{duration.name}|initial_lb' + state.isel({duration_dim: 0}) == 1, name=f'{duration.name}|initial_lb' ) variables = {'duration': duration} @@ -347,23 +358,21 @@ def mutual_exclusivity_constraint( tolerance: float = 1, short_name: str = 'mutual_exclusivity', ) -> linopy.Constraint: - """ - Creates mutual exclusivity constraint for binary variables. + """Creates mutual exclusivity constraint for binary variables. 
- Mathematical formulation: - Σ(binary_vars[i]) ≤ tolerance ∀t + Ensures at most one binary variable can be active (=1) at any time. - Ensures at most one binary variable can be 1 at any time. - Tolerance > 1.0 accounts for binary variable numerical precision. + Mathematical formulation: + Σᵢ binary_vars[i] ≤ tolerance ∀t Args: + model: The submodel to add the constraint to binary_variables: List of binary variables that should be mutually exclusive - tolerance: Upper bound - short_name: Short name of the constraint + tolerance: Upper bound on the sum (default 1, allows slight numerical tolerance) + short_name: Short name for the constraint Returns: - variables: {} (no new variables created) - constraints: {'mutual_exclusivity': constraint} + Mutual exclusivity constraint Raises: AssertionError: If fewer than 2 variables provided or variables aren't binary @@ -396,19 +405,19 @@ def basic_bounds( bounds: tuple[xr.DataArray, xr.DataArray], name: str = None, ) -> list[linopy.constraints.Constraint]: - """Create simple bounds. - variable ∈ [lower_bound, upper_bound] + """Creates simple lower and upper bounds for a variable. 
- Mathematical Formulation: + Mathematical formulation: lower_bound ≤ variable ≤ upper_bound Args: - model: The optimization model instance + model: The submodel to add constraints to variable: Variable to be bounded bounds: Tuple of (lower_bound, upper_bound) absolute bounds + name: Optional name prefix for constraints Returns: - List containing lower_bound and upper_bound constraints + List of [lower_constraint, upper_constraint] """ if not isinstance(model, Submodel): raise ValueError('BoundingPatterns.basic_bounds() can only be used with a Submodel') @@ -426,29 +435,28 @@ def bounds_with_state( model: Submodel, variable: linopy.Variable, bounds: tuple[xr.DataArray, xr.DataArray], - variable_state: linopy.Variable, + state: linopy.Variable, name: str = None, ) -> list[linopy.Constraint]: - """Constraint a variable to bounds, that can be escaped from to 0 by a binary variable. - variable ∈ {0, [max(ε, lower_bound), upper_bound]} + """Creates bounds controlled by a binary state variable. + + Variable is forced to 0 when state=0, bounded when state=1. - Mathematical Formulation: - - variable_state * max(ε, lower_bound) ≤ variable ≤ variable_state * upper_bound + Mathematical formulation: + state · max(ε, lower_bound) ≤ variable ≤ state · upper_bound - Use Cases: - - Investment decisions - - Unit commitment (on/off states) + Where ε is a small positive number (CONFIG.Modeling.epsilon) ensuring + numerical stability when lower_bound is 0. 
Args: - model: The optimization model instance + model: The submodel to add constraints to variable: Variable to be bounded - bounds: Tuple of (lower_bound, upper_bound) absolute bounds - variable_state: Binary variable controlling the bounds + bounds: Tuple of (lower_bound, upper_bound) absolute bounds when state=1 + state: Binary variable (0=force variable to 0, 1=allow bounds) + name: Optional name prefix for constraints Returns: - Tuple containing: - - variables (Dict): Empty dict - - constraints (Dict[str, linopy.Constraint]): 'ub', 'lb' + List of [lower_constraint, upper_constraint] (or [fix_constraint] if lower=upper) """ if not isinstance(model, Submodel): raise ValueError('BoundingPatterns.bounds_with_state() can only be used with a Submodel') @@ -457,13 +465,13 @@ def bounds_with_state( name = name or f'{variable.name}' if np.allclose(lower_bound, upper_bound, atol=1e-10, equal_nan=True): - fix_constraint = model.add_constraints(variable == variable_state * upper_bound, name=f'{name}|fix') + fix_constraint = model.add_constraints(variable == state * upper_bound, name=f'{name}|fix') return [fix_constraint] epsilon = np.maximum(CONFIG.Modeling.epsilon, lower_bound) - upper_constraint = model.add_constraints(variable <= variable_state * upper_bound, name=f'{name}|ub') - lower_constraint = model.add_constraints(variable >= variable_state * epsilon, name=f'{name}|lb') + upper_constraint = model.add_constraints(variable <= state * upper_bound, name=f'{name}|ub') + lower_constraint = model.add_constraints(variable >= state * epsilon, name=f'{name}|lb') return [lower_constraint, upper_constraint] @@ -475,26 +483,22 @@ def scaled_bounds( relative_bounds: tuple[xr.DataArray, xr.DataArray], name: str = None, ) -> list[linopy.Constraint]: - """Constraint a variable by scaling bounds, dependent on another variable. - variable ∈ [lower_bound * scaling_variable, upper_bound * scaling_variable] + """Creates bounds scaled by another variable. 
- Mathematical Formulation: - scaling_variable * lower_factor ≤ variable ≤ scaling_variable * upper_factor + Variable is bounded relative to a scaling variable (e.g., flow rate relative to size). - Use Cases: - - Flow rates bounded by equipment capacity - - Production levels scaled by plant size + Mathematical formulation: + scaling_variable · lower_factor ≤ variable ≤ scaling_variable · upper_factor Args: - model: The optimization model instance + model: The submodel to add constraints to variable: Variable to be bounded - scaling_variable: Variable that scales the bound factors - relative_bounds: Tuple of (lower_factor, upper_factor) relative to scaling variable + scaling_variable: Variable that scales the bound factors (e.g., equipment size) + relative_bounds: Tuple of (lower_factor, upper_factor) relative to scaling_variable + name: Optional name prefix for constraints Returns: - Tuple containing: - - variables (Dict): Empty dict - - constraints (Dict[str, linopy.Constraint]): 'ub', 'lb' + List of [lower_constraint, upper_constraint] (or [fix_constraint] if lower=upper) """ if not isinstance(model, Submodel): raise ValueError('BoundingPatterns.scaled_bounds() can only be used with a Submodel') @@ -517,33 +521,33 @@ def scaled_bounds_with_state( scaling_variable: linopy.Variable, relative_bounds: tuple[xr.DataArray, xr.DataArray], scaling_bounds: tuple[xr.DataArray, xr.DataArray], - variable_state: linopy.Variable, + state: linopy.Variable, name: str = None, ) -> list[linopy.Constraint]: - """Constraint a variable by scaling bounds with binary state control. + """Creates bounds scaled by a variable and controlled by a binary state. - variable ∈ {0, [max(ε, lower_relative_bound) * scaling_variable, upper_relative_bound * scaling_variable]} + Variable is forced to 0 when state=0, bounded relative to scaling_variable when state=1. 
- Mathematical Formulation (Big-M): - (variable_state - 1) * M_misc + scaling_variable * rel_lower ≤ variable ≤ scaling_variable * rel_upper - variable_state * big_m_lower ≤ variable ≤ variable_state * big_m_upper + Mathematical formulation (Big-M): + (state - 1) · M_misc + scaling_variable · rel_lower ≤ variable ≤ scaling_variable · rel_upper + state · big_m_lower ≤ variable ≤ state · big_m_upper Where: - M_misc = scaling_max * rel_lower - big_m_upper = scaling_max * rel_upper - big_m_lower = max(ε, scaling_min * rel_lower) + M_misc = scaling_max · rel_lower + big_m_upper = scaling_max · rel_upper + big_m_lower = max(ε, scaling_min · rel_lower) Args: - model: The optimization model instance + model: The submodel to add constraints to variable: Variable to be bounded - scaling_variable: Variable that scales the bound factors - relative_bounds: Tuple of (lower_factor, upper_factor) relative to scaling variable - scaling_bounds: Tuple of (scaling_min, scaling_max) bounds of the scaling variable - variable_state: Binary variable for on/off control + scaling_variable: Variable that scales the bound factors (e.g., equipment size) + relative_bounds: Tuple of (lower_factor, upper_factor) relative to scaling_variable + scaling_bounds: Tuple of (scaling_min, scaling_max) bounds of the scaling_variable + state: Binary variable (0=force variable to 0, 1=allow scaled bounds) name: Optional name prefix for constraints Returns: - List[linopy.Constraint]: List of constraint objects + List of [scaling_lower, scaling_upper, binary_lower, binary_upper] constraints """ if not isinstance(model, Submodel): raise ValueError('BoundingPatterns.scaled_bounds_with_state() can only be used with a Submodel') @@ -555,60 +559,69 @@ def scaled_bounds_with_state( big_m_misc = scaling_max * rel_lower scaling_lower = model.add_constraints( - variable >= (variable_state - 1) * big_m_misc + scaling_variable * rel_lower, name=f'{name}|lb2' + variable >= (state - 1) * big_m_misc + scaling_variable * 
rel_lower, name=f'{name}|lb2' ) scaling_upper = model.add_constraints(variable <= scaling_variable * rel_upper, name=f'{name}|ub2') big_m_upper = rel_upper * scaling_max big_m_lower = np.maximum(CONFIG.Modeling.epsilon, rel_lower * scaling_min) - binary_upper = model.add_constraints(variable_state * big_m_upper >= variable, name=f'{name}|ub1') - binary_lower = model.add_constraints(variable_state * big_m_lower <= variable, name=f'{name}|lb1') + binary_upper = model.add_constraints(state * big_m_upper >= variable, name=f'{name}|ub1') + binary_lower = model.add_constraints(state * big_m_lower <= variable, name=f'{name}|lb1') return [scaling_lower, scaling_upper, binary_lower, binary_upper] @staticmethod def state_transition_bounds( model: Submodel, - state_variable: linopy.Variable, - switch_on: linopy.Variable, - switch_off: linopy.Variable, + state: linopy.Variable, + activate: linopy.Variable, + deactivate: linopy.Variable, name: str, - previous_state=0, + previous_state: float | xr.DataArray = 0, coord: str = 'time', ) -> tuple[linopy.Constraint, linopy.Constraint, linopy.Constraint]: - """ - Creates switch-on/off variables with state transition logic. + """Creates state transition constraints for binary state variables. + + Tracks transitions between active (1) and inactive (0) states using + separate binary variables for activation and deactivation events. 
Mathematical formulation: - switch_on[t] - switch_off[t] = state[t] - state[t-1] ∀t > 0 - switch_on[0] - switch_off[0] = state[0] - previous_state - switch_on[t] + switch_off[t] ≤ 1 ∀t - switch_on[t], switch_off[t] ∈ {0, 1} + activate[t] - deactivate[t] = state[t] - state[t-1] ∀t > 0 + activate[0] - deactivate[0] = state[0] - previous_state + activate[t] + deactivate[t] ≤ 1 ∀t + activate[t], deactivate[t] ∈ {0, 1} + + Args: + model: The submodel to add constraints to + state: Binary state variable (0=inactive, 1=active) + activate: Binary variable for transitions from inactive to active (0→1) + deactivate: Binary variable for transitions from active to inactive (1→0) + name: Base name for constraints + previous_state: State value before first timestep (default 0) + coord: Time dimension name (default 'time') Returns: - variables: {'switch_on': binary_var, 'switch_off': binary_var} - constraints: {'transition': constraint, 'initial': constraint, 'mutex': constraint} + Tuple of (transition_constraint, initial_constraint, mutex_constraint) """ if not isinstance(model, Submodel): - raise ValueError('ModelingPrimitives.state_transition_bounds() can only be used with a Submodel') + raise ValueError('BoundingPatterns.state_transition_bounds() can only be used with a Submodel') # State transition constraints for t > 0 transition = model.add_constraints( - switch_on.isel({coord: slice(1, None)}) - switch_off.isel({coord: slice(1, None)}) - == state_variable.isel({coord: slice(1, None)}) - state_variable.isel({coord: slice(None, -1)}), + activate.isel({coord: slice(1, None)}) - deactivate.isel({coord: slice(1, None)}) + == state.isel({coord: slice(1, None)}) - state.isel({coord: slice(None, -1)}), name=f'{name}|transition', ) # Initial state transition for t = 0 initial = model.add_constraints( - switch_on.isel({coord: 0}) - switch_off.isel({coord: 0}) - == state_variable.isel({coord: 0}) - previous_state, + activate.isel({coord: 0}) - deactivate.isel({coord: 0}) == 
state.isel({coord: 0}) - previous_state, name=f'{name}|initial', ) - # At most one switch per timestep - mutex = model.add_constraints(switch_on + switch_off <= 1, name=f'{name}|mutex') + # At most one transition per timestep (mutual exclusivity) + mutex = model.add_constraints(activate + deactivate <= 1, name=f'{name}|mutex') return transition, initial, mutex @@ -616,63 +629,66 @@ def state_transition_bounds( def continuous_transition_bounds( model: Submodel, continuous_variable: linopy.Variable, - switch_on: linopy.Variable, - switch_off: linopy.Variable, + activate: linopy.Variable, + deactivate: linopy.Variable, name: str, max_change: float | xr.DataArray, previous_value: float | xr.DataArray = 0.0, coord: str = 'time', ) -> tuple[linopy.Constraint, linopy.Constraint, linopy.Constraint, linopy.Constraint]: - """ - Constrains a continuous variable to only change when switch variables are active. + """Constrains a continuous variable to only change during state transitions. + + Ensures a continuous variable remains constant unless a transition event occurs. + Uses Big-M formulation to enforce change bounds. Mathematical formulation: - -max_change * (switch_on[t] + switch_off[t]) <= continuous[t] - continuous[t-1] <= max_change * (switch_on[t] + switch_off[t]) ∀t > 0 - -max_change * (switch_on[0] + switch_off[0]) <= continuous[0] - previous_value <= max_change * (switch_on[0] + switch_off[0]) - switch_on[t], switch_off[t] ∈ {0, 1} + -max_change · (activate[t] + deactivate[t]) ≤ continuous[t] - continuous[t-1] ≤ max_change · (activate[t] + deactivate[t]) ∀t > 0 + -max_change · (activate[0] + deactivate[0]) ≤ continuous[0] - previous_value ≤ max_change · (activate[0] + deactivate[0]) + activate[t], deactivate[t] ∈ {0, 1} - This ensures the continuous variable can only change when switch_on or switch_off is 1. - When both switches are 0, the variable must stay exactly constant. 
+ Behavior: + - When activate=0 and deactivate=0: variable must stay constant + - When activate=1 or deactivate=1: variable can change within ±max_change Args: model: The submodel to add constraints to - continuous_variable: The continuous variable to constrain - switch_on: Binary variable indicating when changes are allowed (typically transitions to active state) - switch_off: Binary variable indicating when changes are allowed (typically transitions to inactive state) - name: Base name for the constraints - max_change: Maximum possible change in the continuous variable (Big-M value) - previous_value: Initial value of the continuous variable before first period - coord: Coordinate name for time dimension + continuous_variable: Continuous variable to constrain + activate: Binary variable for transitions from inactive to active (0→1) + deactivate: Binary variable for transitions from active to inactive (1→0) + name: Base name for constraints + max_change: Maximum allowed change (Big-M value, should be ≥ actual max change) + previous_value: Initial value before first timestep (default 0.0) + coord: Time dimension name (default 'time') Returns: - Tuple of constraints: (transition_upper, transition_lower, initial_upper, initial_lower) + Tuple of (transition_upper, transition_lower, initial_upper, initial_lower) constraints """ if not isinstance(model, Submodel): raise ValueError('ModelingPrimitives.continuous_transition_bounds() can only be used with a Submodel') - # Transition constraints for t > 0: continuous variable can only change when switches are active + # Transition constraints for t > 0: continuous variable can only change when transitions occur transition_upper = model.add_constraints( continuous_variable.isel({coord: slice(1, None)}) - continuous_variable.isel({coord: slice(None, -1)}) - <= max_change * (switch_on.isel({coord: slice(1, None)}) + switch_off.isel({coord: slice(1, None)})), + <= max_change * (activate.isel({coord: slice(1, None)}) + 
deactivate.isel({coord: slice(1, None)})), name=f'{name}|transition_ub', ) transition_lower = model.add_constraints( -(continuous_variable.isel({coord: slice(1, None)}) - continuous_variable.isel({coord: slice(None, -1)})) - <= max_change * (switch_on.isel({coord: slice(1, None)}) + switch_off.isel({coord: slice(1, None)})), + <= max_change * (activate.isel({coord: slice(1, None)}) + deactivate.isel({coord: slice(1, None)})), name=f'{name}|transition_lb', ) # Initial constraints for t = 0 initial_upper = model.add_constraints( continuous_variable.isel({coord: 0}) - previous_value - <= max_change * (switch_on.isel({coord: 0}) + switch_off.isel({coord: 0})), + <= max_change * (activate.isel({coord: 0}) + deactivate.isel({coord: 0})), name=f'{name}|initial_ub', ) initial_lower = model.add_constraints( -continuous_variable.isel({coord: 0}) + previous_value - <= max_change * (switch_on.isel({coord: 0}) + switch_off.isel({coord: 0})), + <= max_change * (activate.isel({coord: 0}) + deactivate.isel({coord: 0})), name=f'{name}|initial_lb', ) diff --git a/mkdocs.yml b/mkdocs.yml index 0adba464d..7e86d9720 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -24,7 +24,7 @@ nav: - LinearConverter: user-guide/mathematical-notation/elements/LinearConverter.md - Features: - InvestParameters: user-guide/mathematical-notation/features/InvestParameters.md - - OnOffParameters: user-guide/mathematical-notation/features/OnOffParameters.md + - StatusParameters: user-guide/mathematical-notation/features/StatusParameters.md - Piecewise: user-guide/mathematical-notation/features/Piecewise.md - Effects, Penalty & Objective: user-guide/mathematical-notation/effects-penalty-objective.md - Modeling Patterns: diff --git a/tests/conftest.py b/tests/conftest.py index b7acee446..11d35f536 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -138,7 +138,7 @@ def simple(): size=50, relative_minimum=5 / 50, relative_maximum=1, - on_off_parameters=fx.OnOffParameters(), + 
status_parameters=fx.StatusParameters(), ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ) @@ -149,7 +149,7 @@ def complex(): return fx.linear_converters.Boiler( 'Kessel', thermal_efficiency=0.5, - on_off_parameters=fx.OnOffParameters(effects_per_running_hour={'costs': 0, 'CO2': 1000}), + status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}), thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', @@ -164,14 +164,14 @@ def complex(): mandatory=True, effects_of_investment_per_size={'costs': 10, 'PE': 2}, ), - on_off_parameters=fx.OnOffParameters( - on_hours_min=0, - on_hours_max=1000, - consecutive_on_hours_max=10, - consecutive_on_hours_min=1, - consecutive_off_hours_max=10, - effects_per_switch_on=0.01, - switch_on_max=1000, + status_parameters=fx.StatusParameters( + active_hours_min=0, + active_hours_max=1000, + max_uptime=10, + min_uptime=1, + max_downtime=10, + effects_per_startup=0.01, + startup_limit=1000, ), flow_hours_max=1e6, ), @@ -187,7 +187,7 @@ def simple(): thermal_efficiency=0.5, electrical_efficiency=0.4, electrical_flow=fx.Flow( - 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, on_off_parameters=fx.OnOffParameters() + 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters() ), thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), fuel_flow=fx.Flow('Q_fu', bus='Gas'), @@ -200,7 +200,7 @@ def base(): 'KWK', thermal_efficiency=0.5, electrical_efficiency=0.4, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + status_parameters=fx.StatusParameters(effects_per_startup=0.01), electrical_flow=fx.Flow('P_el', bus='Strom', size=60, relative_minimum=5 / 60, previous_flow_rate=10), thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=1e3), fuel_flow=fx.Flow('Q_fu', bus='Gas', size=1e3), @@ -224,7 +224,7 @@ def piecewise(): 'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]), } ), - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + 
status_parameters=fx.StatusParameters(effects_per_startup=0.01), ) @staticmethod @@ -249,7 +249,7 @@ def segments(timesteps_length): 'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]), } ), - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + status_parameters=fx.StatusParameters(effects_per_startup=0.01), ) @@ -604,14 +604,14 @@ def flow_system_long(): size=95, relative_minimum=12 / 95, previous_flow_rate=0, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=1000), + status_parameters=fx.StatusParameters(effects_per_startup=1000), ), ), fx.linear_converters.CHP( 'BHKW2', thermal_efficiency=0.58, electrical_efficiency=0.22, - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=24000), + status_parameters=fx.StatusParameters(effects_per_startup=24000), electrical_flow=fx.Flow('P_el', bus='Strom'), thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=288, relative_minimum=87 / 288), diff --git a/tests/test_component.py b/tests/test_component.py index c33aaf437..41d39b12a 100644 --- a/tests/test_component.py +++ b/tests/test_component.py @@ -82,7 +82,7 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300), ] comp = flixopt.elements.Component( - 'TestComponent', inputs=inputs, outputs=outputs, on_off_parameters=fx.OnOffParameters() + 'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters() ) flow_system.add_elements(comp) model = create_linopy_model(flow_system) @@ -92,18 +92,18 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co { 'TestComponent(In1)|flow_rate', 'TestComponent(In1)|total_flow_hours', - 'TestComponent(In1)|on', - 'TestComponent(In1)|on_hours_total', + 'TestComponent(In1)|status', + 'TestComponent(In1)|active_hours', 'TestComponent(Out1)|flow_rate', 'TestComponent(Out1)|total_flow_hours', 
- 'TestComponent(Out1)|on', - 'TestComponent(Out1)|on_hours_total', + 'TestComponent(Out1)|status', + 'TestComponent(Out1)|active_hours', 'TestComponent(Out2)|flow_rate', 'TestComponent(Out2)|total_flow_hours', - 'TestComponent(Out2)|on', - 'TestComponent(Out2)|on_hours_total', - 'TestComponent|on', - 'TestComponent|on_hours_total', + 'TestComponent(Out2)|status', + 'TestComponent(Out2)|active_hours', + 'TestComponent|status', + 'TestComponent|active_hours', }, msg='Incorrect variables', ) @@ -114,18 +114,18 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co 'TestComponent(In1)|total_flow_hours', 'TestComponent(In1)|flow_rate|lb', 'TestComponent(In1)|flow_rate|ub', - 'TestComponent(In1)|on_hours_total', + 'TestComponent(In1)|active_hours', 'TestComponent(Out1)|total_flow_hours', 'TestComponent(Out1)|flow_rate|lb', 'TestComponent(Out1)|flow_rate|ub', - 'TestComponent(Out1)|on_hours_total', + 'TestComponent(Out1)|active_hours', 'TestComponent(Out2)|total_flow_hours', 'TestComponent(Out2)|flow_rate|lb', 'TestComponent(Out2)|flow_rate|ub', - 'TestComponent(Out2)|on_hours_total', - 'TestComponent|on|lb', - 'TestComponent|on|ub', - 'TestComponent|on_hours_total', + 'TestComponent(Out2)|active_hours', + 'TestComponent|status|lb', + 'TestComponent|status|ub', + 'TestComponent|active_hours', }, msg='Incorrect constraints', ) @@ -138,36 +138,39 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co model['TestComponent(Out2)|flow_rate'], model.add_variables(lower=0, upper=300 * upper_bound_flow_rate, coords=model.get_coords()), ) - assert_var_equal(model['TestComponent|on'], model.add_variables(binary=True, coords=model.get_coords())) - assert_var_equal(model['TestComponent(Out2)|on'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal( + model['TestComponent(Out2)|status'], 
model.add_variables(binary=True, coords=model.get_coords()) + ) assert_conequal( model.constraints['TestComponent(Out2)|flow_rate|lb'], - model.variables['TestComponent(Out2)|flow_rate'] >= model.variables['TestComponent(Out2)|on'] * 0.3 * 300, + model.variables['TestComponent(Out2)|flow_rate'] + >= model.variables['TestComponent(Out2)|status'] * 0.3 * 300, ) assert_conequal( model.constraints['TestComponent(Out2)|flow_rate|ub'], model.variables['TestComponent(Out2)|flow_rate'] - <= model.variables['TestComponent(Out2)|on'] * 300 * upper_bound_flow_rate, + <= model.variables['TestComponent(Out2)|status'] * 300 * upper_bound_flow_rate, ) assert_conequal( - model.constraints['TestComponent|on|lb'], - model.variables['TestComponent|on'] + model.constraints['TestComponent|status|lb'], + model.variables['TestComponent|status'] >= ( - model.variables['TestComponent(In1)|on'] - + model.variables['TestComponent(Out1)|on'] - + model.variables['TestComponent(Out2)|on'] + model.variables['TestComponent(In1)|status'] + + model.variables['TestComponent(Out1)|status'] + + model.variables['TestComponent(Out2)|status'] ) / (3 + 1e-5), ) assert_conequal( - model.constraints['TestComponent|on|ub'], - model.variables['TestComponent|on'] + model.constraints['TestComponent|status|ub'], + model.variables['TestComponent|status'] <= ( - model.variables['TestComponent(In1)|on'] - + model.variables['TestComponent(Out1)|on'] - + model.variables['TestComponent(Out2)|on'] + model.variables['TestComponent(In1)|status'] + + model.variables['TestComponent(Out1)|status'] + + model.variables['TestComponent(Out2)|status'] ) + 1e-5, ) @@ -180,7 +183,7 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi ] outputs = [] comp = flixopt.elements.Component( - 'TestComponent', inputs=inputs, outputs=outputs, on_off_parameters=fx.OnOffParameters() + 'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters() ) flow_system.add_elements(comp) model = 
create_linopy_model(flow_system) @@ -190,10 +193,10 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi { 'TestComponent(In1)|flow_rate', 'TestComponent(In1)|total_flow_hours', - 'TestComponent(In1)|on', - 'TestComponent(In1)|on_hours_total', - 'TestComponent|on', - 'TestComponent|on_hours_total', + 'TestComponent(In1)|status', + 'TestComponent(In1)|active_hours', + 'TestComponent|status', + 'TestComponent|active_hours', }, msg='Incorrect variables', ) @@ -204,9 +207,9 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi 'TestComponent(In1)|total_flow_hours', 'TestComponent(In1)|flow_rate|lb', 'TestComponent(In1)|flow_rate|ub', - 'TestComponent(In1)|on_hours_total', - 'TestComponent|on', - 'TestComponent|on_hours_total', + 'TestComponent(In1)|active_hours', + 'TestComponent|status', + 'TestComponent|active_hours', }, msg='Incorrect constraints', ) @@ -214,21 +217,23 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi assert_var_equal( model['TestComponent(In1)|flow_rate'], model.add_variables(lower=0, upper=100, coords=model.get_coords()) ) - assert_var_equal(model['TestComponent|on'], model.add_variables(binary=True, coords=model.get_coords())) - assert_var_equal(model['TestComponent(In1)|on'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal( + model['TestComponent(In1)|status'], model.add_variables(binary=True, coords=model.get_coords()) + ) assert_conequal( model.constraints['TestComponent(In1)|flow_rate|lb'], - model.variables['TestComponent(In1)|flow_rate'] >= model.variables['TestComponent(In1)|on'] * 0.1 * 100, + model.variables['TestComponent(In1)|flow_rate'] >= model.variables['TestComponent(In1)|status'] * 0.1 * 100, ) assert_conequal( model.constraints['TestComponent(In1)|flow_rate|ub'], - 
model.variables['TestComponent(In1)|flow_rate'] <= model.variables['TestComponent(In1)|on'] * 100, + model.variables['TestComponent(In1)|flow_rate'] <= model.variables['TestComponent(In1)|status'] * 100, ) assert_conequal( - model.constraints['TestComponent|on'], - model.variables['TestComponent|on'] == model.variables['TestComponent(In1)|on'], + model.constraints['TestComponent|status'], + model.variables['TestComponent|status'] == model.variables['TestComponent(In1)|status'], ) def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_config): @@ -257,7 +262,7 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor ), ] comp = flixopt.elements.Component( - 'TestComponent', inputs=inputs, outputs=outputs, on_off_parameters=fx.OnOffParameters() + 'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters() ) flow_system.add_elements(comp) model = create_linopy_model(flow_system) @@ -267,18 +272,18 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor { 'TestComponent(In1)|flow_rate', 'TestComponent(In1)|total_flow_hours', - 'TestComponent(In1)|on', - 'TestComponent(In1)|on_hours_total', + 'TestComponent(In1)|status', + 'TestComponent(In1)|active_hours', 'TestComponent(Out1)|flow_rate', 'TestComponent(Out1)|total_flow_hours', - 'TestComponent(Out1)|on', - 'TestComponent(Out1)|on_hours_total', + 'TestComponent(Out1)|status', + 'TestComponent(Out1)|active_hours', 'TestComponent(Out2)|flow_rate', 'TestComponent(Out2)|total_flow_hours', - 'TestComponent(Out2)|on', - 'TestComponent(Out2)|on_hours_total', - 'TestComponent|on', - 'TestComponent|on_hours_total', + 'TestComponent(Out2)|status', + 'TestComponent(Out2)|active_hours', + 'TestComponent|status', + 'TestComponent|active_hours', }, msg='Incorrect variables', ) @@ -289,18 +294,18 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor 
'TestComponent(In1)|total_flow_hours', 'TestComponent(In1)|flow_rate|lb', 'TestComponent(In1)|flow_rate|ub', - 'TestComponent(In1)|on_hours_total', + 'TestComponent(In1)|active_hours', 'TestComponent(Out1)|total_flow_hours', 'TestComponent(Out1)|flow_rate|lb', 'TestComponent(Out1)|flow_rate|ub', - 'TestComponent(Out1)|on_hours_total', + 'TestComponent(Out1)|active_hours', 'TestComponent(Out2)|total_flow_hours', 'TestComponent(Out2)|flow_rate|lb', 'TestComponent(Out2)|flow_rate|ub', - 'TestComponent(Out2)|on_hours_total', - 'TestComponent|on|lb', - 'TestComponent|on|ub', - 'TestComponent|on_hours_total', + 'TestComponent(Out2)|active_hours', + 'TestComponent|status|lb', + 'TestComponent|status|ub', + 'TestComponent|active_hours', }, msg='Incorrect constraints', ) @@ -313,36 +318,39 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor model['TestComponent(Out2)|flow_rate'], model.add_variables(lower=0, upper=300 * upper_bound_flow_rate, coords=model.get_coords()), ) - assert_var_equal(model['TestComponent|on'], model.add_variables(binary=True, coords=model.get_coords())) - assert_var_equal(model['TestComponent(Out2)|on'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords())) + assert_var_equal( + model['TestComponent(Out2)|status'], model.add_variables(binary=True, coords=model.get_coords()) + ) assert_conequal( model.constraints['TestComponent(Out2)|flow_rate|lb'], - model.variables['TestComponent(Out2)|flow_rate'] >= model.variables['TestComponent(Out2)|on'] * 0.3 * 300, + model.variables['TestComponent(Out2)|flow_rate'] + >= model.variables['TestComponent(Out2)|status'] * 0.3 * 300, ) assert_conequal( model.constraints['TestComponent(Out2)|flow_rate|ub'], model.variables['TestComponent(Out2)|flow_rate'] - <= model.variables['TestComponent(Out2)|on'] * 300 * upper_bound_flow_rate, + <= 
model.variables['TestComponent(Out2)|status'] * 300 * upper_bound_flow_rate, ) assert_conequal( - model.constraints['TestComponent|on|lb'], - model.variables['TestComponent|on'] + model.constraints['TestComponent|status|lb'], + model.variables['TestComponent|status'] >= ( - model.variables['TestComponent(In1)|on'] - + model.variables['TestComponent(Out1)|on'] - + model.variables['TestComponent(Out2)|on'] + model.variables['TestComponent(In1)|status'] + + model.variables['TestComponent(Out1)|status'] + + model.variables['TestComponent(Out2)|status'] ) / (3 + 1e-5), ) assert_conequal( - model.constraints['TestComponent|on|ub'], - model.variables['TestComponent|on'] + model.constraints['TestComponent|status|ub'], + model.variables['TestComponent|status'] <= ( - model.variables['TestComponent(In1)|on'] - + model.variables['TestComponent(Out1)|on'] - + model.variables['TestComponent(Out2)|on'] + model.variables['TestComponent(In1)|status'] + + model.variables['TestComponent(Out1)|status'] + + model.variables['TestComponent(Out2)|status'] ) + 1e-5, ) @@ -377,7 +385,7 @@ def test_previous_states_with_multiple_flows_parameterized( relative_minimum=np.ones(10) * 0.1, size=100, previous_flow_rate=in1_previous_flow_rate, - on_off_parameters=fx.OnOffParameters(consecutive_on_hours_min=3), + status_parameters=fx.StatusParameters(min_uptime=3), ), ] outputs = [ @@ -397,15 +405,15 @@ def test_previous_states_with_multiple_flows_parameterized( 'TestComponent', inputs=inputs, outputs=outputs, - on_off_parameters=fx.OnOffParameters(consecutive_on_hours_min=3), + status_parameters=fx.StatusParameters(min_uptime=3), ) flow_system.add_elements(comp) create_linopy_model(flow_system) assert_conequal( - comp.submodel.constraints['TestComponent|consecutive_on_hours|initial'], - comp.submodel.variables['TestComponent|consecutive_on_hours'].isel(time=0) - == comp.submodel.variables['TestComponent|on'].isel(time=0) * (previous_on_hours + 1), + 
comp.submodel.constraints['TestComponent|uptime|initial'], + comp.submodel.variables['TestComponent|uptime'].isel(time=0) + == comp.submodel.variables['TestComponent|status'].isel(time=0) * (previous_on_hours + 1), ) @@ -438,9 +446,9 @@ def test_transmission_basic(self, basic_flow_system, highs_solver): # Assertions assert_almost_equal_numeric( - transmission.in1.submodel.on_off.on.solution.values, + transmission.in1.submodel.status.status.solution.values, np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]), - 'On does not work properly', + 'Status does not work properly', ) assert_almost_equal_numeric( @@ -502,9 +510,9 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): # Assertions assert_almost_equal_numeric( - transmission.in1.submodel.on_off.on.solution.values, + transmission.in1.submodel.status.status.solution.values, np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]), - 'On does not work properly', + 'Status does not work properly', ) assert_almost_equal_numeric( @@ -583,9 +591,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): # Assertions assert_almost_equal_numeric( - transmission.in1.submodel.on_off.on.solution.values, + transmission.in1.submodel.status.status.solution.values, np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]), - 'On does not work properly', + 'Status does not work properly', ) assert_almost_equal_numeric( diff --git a/tests/test_flow.py b/tests/test_flow.py index 3017b25dd..0a1a03341 100644 --- a/tests/test_flow.py +++ b/tests/test_flow.py @@ -524,14 +524,14 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config): size=100, relative_minimum=0.2, relative_maximum=0.8, - on_off_parameters=fx.OnOffParameters(), + status_parameters=fx.StatusParameters(), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) assert_sets_equal( set(flow.submodel.variables), - {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|on', 
'Sink(Wärme)|on_hours_total'}, + {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|status', 'Sink(Wärme)|active_hours'}, msg='Incorrect variables', ) @@ -539,7 +539,7 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config): set(flow.submodel.constraints), { 'Sink(Wärme)|total_flow_hours', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|active_hours', 'Sink(Wärme)|flow_rate|lb', 'Sink(Wärme)|flow_rate|ub', }, @@ -555,31 +555,35 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config): ), ) - # OnOff + # Status assert_var_equal( - flow.submodel.on_off.on, + flow.submodel.status.status, model.add_variables(binary=True, coords=model.get_coords()), ) + # Upper bound is total hours when active_hours_max is not specified + total_hours = model.hours_per_step.sum('time') assert_var_equal( - model.variables['Sink(Wärme)|on_hours_total'], - model.add_variables(lower=0, coords=model.get_coords(['period', 'scenario'])), + model.variables['Sink(Wärme)|active_hours'], + model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])), ) assert_conequal( model.constraints['Sink(Wärme)|flow_rate|lb'], - flow.submodel.variables['Sink(Wärme)|flow_rate'] >= flow.submodel.variables['Sink(Wärme)|on'] * 0.2 * 100, + flow.submodel.variables['Sink(Wärme)|flow_rate'] + >= flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 100, ) assert_conequal( model.constraints['Sink(Wärme)|flow_rate|ub'], - flow.submodel.variables['Sink(Wärme)|flow_rate'] <= flow.submodel.variables['Sink(Wärme)|on'] * 0.8 * 100, + flow.submodel.variables['Sink(Wärme)|flow_rate'] + <= flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 100, ) assert_conequal( - model.constraints['Sink(Wärme)|on_hours_total'], - flow.submodel.variables['Sink(Wärme)|on_hours_total'] - == (flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step).sum('time'), + model.constraints['Sink(Wärme)|active_hours'], + 
flow.submodel.variables['Sink(Wärme)|active_hours'] + == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'), ) - def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_config): + def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_config): flow_system, coords_config = basic_flow_system_linopy_coords, coords_config timesteps = flow_system.timesteps @@ -589,8 +593,8 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_ flow = fx.Flow( 'Wärme', bus='Fernwärme', - on_off_parameters=fx.OnOffParameters( - effects_per_running_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour} + status_parameters=fx.StatusParameters( + effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour} ), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', '')) @@ -602,8 +606,8 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_ { 'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', - 'Sink(Wärme)|on', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|status', + 'Sink(Wärme)|active_hours', }, msg='Incorrect variables', ) @@ -613,7 +617,7 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_ 'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate|lb', 'Sink(Wärme)|flow_rate|ub', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|active_hours', }, msg='Incorrect constraints', ) @@ -621,8 +625,8 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_ assert 'Sink(Wärme)->costs(temporal)' in set(costs.submodel.constraints) assert 'Sink(Wärme)->CO2(temporal)' in set(co2.submodel.constraints) - costs_per_running_hour = flow.on_off_parameters.effects_per_running_hour['costs'] - co2_per_running_hour = flow.on_off_parameters.effects_per_running_hour['CO2'] + costs_per_running_hour = flow.status_parameters.effects_per_active_hour['costs'] 
+ co2_per_running_hour = flow.status_parameters.effects_per_active_hour['CO2'] assert costs_per_running_hour.dims == tuple(model.get_coords()) assert co2_per_running_hour.dims == tuple(model.get_coords()) @@ -630,13 +634,13 @@ def test_effects_per_running_hour(self, basic_flow_system_linopy_coords, coords_ assert_conequal( model.constraints['Sink(Wärme)->costs(temporal)'], model.variables['Sink(Wärme)->costs(temporal)'] - == flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step * costs_per_running_hour, + == flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step * costs_per_running_hour, ) assert_conequal( model.constraints['Sink(Wärme)->CO2(temporal)'], model.variables['Sink(Wärme)->CO2(temporal)'] - == flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step * co2_per_running_hour, + == flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step * co2_per_running_hour, ) def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_config): @@ -647,322 +651,322 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - consecutive_on_hours_min=2, # Must run for at least 2 hours when turned on - consecutive_on_hours_max=8, # Can't run more than 8 consecutive hours + status_parameters=fx.StatusParameters( + min_uptime=2, # Must run for at least 2 hours when turned on + max_uptime=8, # Can't run more than 8 consecutive hours ), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) - assert {'Sink(Wärme)|consecutive_on_hours', 'Sink(Wärme)|on'}.issubset(set(flow.submodel.variables)) + assert {'Sink(Wärme)|uptime', 'Sink(Wärme)|status'}.issubset(set(flow.submodel.variables)) assert_sets_equal( { - 'Sink(Wärme)|consecutive_on_hours|ub', - 'Sink(Wärme)|consecutive_on_hours|forward', - 'Sink(Wärme)|consecutive_on_hours|backward', - 
'Sink(Wärme)|consecutive_on_hours|initial', - 'Sink(Wärme)|consecutive_on_hours|lb', + 'Sink(Wärme)|uptime|ub', + 'Sink(Wärme)|uptime|forward', + 'Sink(Wärme)|uptime|backward', + 'Sink(Wärme)|uptime|initial', + 'Sink(Wärme)|uptime|lb', } & set(flow.submodel.constraints), { - 'Sink(Wärme)|consecutive_on_hours|ub', - 'Sink(Wärme)|consecutive_on_hours|forward', - 'Sink(Wärme)|consecutive_on_hours|backward', - 'Sink(Wärme)|consecutive_on_hours|initial', - 'Sink(Wärme)|consecutive_on_hours|lb', + 'Sink(Wärme)|uptime|ub', + 'Sink(Wärme)|uptime|forward', + 'Sink(Wärme)|uptime|backward', + 'Sink(Wärme)|uptime|initial', + 'Sink(Wärme)|uptime|lb', }, - msg='Missing consecutive on hours constraints', + msg='Missing uptime constraints', ) assert_var_equal( - model.variables['Sink(Wärme)|consecutive_on_hours'], + model.variables['Sink(Wärme)|uptime'], model.add_variables(lower=0, upper=8, coords=model.get_coords()), ) mega = model.hours_per_step.sum('time') assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|ub'], - model.variables['Sink(Wärme)|consecutive_on_hours'] <= model.variables['Sink(Wärme)|on'] * mega, + model.constraints['Sink(Wärme)|uptime|ub'], + model.variables['Sink(Wärme)|uptime'] <= model.variables['Sink(Wärme)|status'] * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|forward'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(1, None)) - <= model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|uptime|forward'], + model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None)) + <= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)), ) # eq: duration(t) >= duration(t - 1) + dt(t) + (On(t) - 1) * BIG assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|backward'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(1, None)) - >= 
model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|uptime|backward'], + model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None)) + >= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)) - + (model.variables['Sink(Wärme)|on'].isel(time=slice(1, None)) - 1) * mega, + + (model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) - 1) * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|initial'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=0) - == model.variables['Sink(Wärme)|on'].isel(time=0) * model.hours_per_step.isel(time=0), + model.constraints['Sink(Wärme)|uptime|initial'], + model.variables['Sink(Wärme)|uptime'].isel(time=0) + == model.variables['Sink(Wärme)|status'].isel(time=0) * model.hours_per_step.isel(time=0), ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|lb'], - model.variables['Sink(Wärme)|consecutive_on_hours'] + model.constraints['Sink(Wärme)|uptime|lb'], + model.variables['Sink(Wärme)|uptime'] >= ( - model.variables['Sink(Wärme)|on'].isel(time=slice(None, -1)) - - model.variables['Sink(Wärme)|on'].isel(time=slice(1, None)) + model.variables['Sink(Wärme)|status'].isel(time=slice(None, -1)) + - model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) ) * 2, ) def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, coords_config): - """Test flow with minimum and maximum consecutive on hours.""" + """Test flow with minimum and maximum uptime.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - consecutive_on_hours_min=2, # Must run for at least 2 hours when turned on - consecutive_on_hours_max=8, # Can't run more than 8 consecutive hours + status_parameters=fx.StatusParameters( + min_uptime=2, # Must 
run for at least 2 hours when active + max_uptime=8, # Can't run more than 8 consecutive hours ), - previous_flow_rate=np.array([10, 20, 30, 0, 20, 20, 30]), # Previously on for 3 steps + previous_flow_rate=np.array([10, 20, 30, 0, 20, 20, 30]), # Previously active for 3 steps ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) - assert {'Sink(Wärme)|consecutive_on_hours', 'Sink(Wärme)|on'}.issubset(set(flow.submodel.variables)) + assert {'Sink(Wärme)|uptime', 'Sink(Wärme)|status'}.issubset(set(flow.submodel.variables)) assert_sets_equal( { - 'Sink(Wärme)|consecutive_on_hours|lb', - 'Sink(Wärme)|consecutive_on_hours|forward', - 'Sink(Wärme)|consecutive_on_hours|backward', - 'Sink(Wärme)|consecutive_on_hours|initial', + 'Sink(Wärme)|uptime|lb', + 'Sink(Wärme)|uptime|forward', + 'Sink(Wärme)|uptime|backward', + 'Sink(Wärme)|uptime|initial', } & set(flow.submodel.constraints), { - 'Sink(Wärme)|consecutive_on_hours|lb', - 'Sink(Wärme)|consecutive_on_hours|forward', - 'Sink(Wärme)|consecutive_on_hours|backward', - 'Sink(Wärme)|consecutive_on_hours|initial', + 'Sink(Wärme)|uptime|lb', + 'Sink(Wärme)|uptime|forward', + 'Sink(Wärme)|uptime|backward', + 'Sink(Wärme)|uptime|initial', }, - msg='Missing consecutive on hours constraints for previous states', + msg='Missing uptime constraints for previous states', ) assert_var_equal( - model.variables['Sink(Wärme)|consecutive_on_hours'], + model.variables['Sink(Wärme)|uptime'], model.add_variables(lower=0, upper=8, coords=model.get_coords()), ) mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 3 assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|ub'], - model.variables['Sink(Wärme)|consecutive_on_hours'] <= model.variables['Sink(Wärme)|on'] * mega, + model.constraints['Sink(Wärme)|uptime|ub'], + model.variables['Sink(Wärme)|uptime'] <= model.variables['Sink(Wärme)|status'] * mega, ) assert_conequal( - 
model.constraints['Sink(Wärme)|consecutive_on_hours|forward'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(1, None)) - <= model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|uptime|forward'], + model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None)) + <= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)), ) # eq: duration(t) >= duration(t - 1) + dt(t) + (On(t) - 1) * BIG assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|backward'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(1, None)) - >= model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|uptime|backward'], + model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None)) + >= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)) - + (model.variables['Sink(Wärme)|on'].isel(time=slice(1, None)) - 1) * mega, + + (model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) - 1) * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|initial'], - model.variables['Sink(Wärme)|consecutive_on_hours'].isel(time=0) - == model.variables['Sink(Wärme)|on'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 3)), + model.constraints['Sink(Wärme)|uptime|initial'], + model.variables['Sink(Wärme)|uptime'].isel(time=0) + == model.variables['Sink(Wärme)|status'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 3)), ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_on_hours|lb'], - model.variables['Sink(Wärme)|consecutive_on_hours'] + model.constraints['Sink(Wärme)|uptime|lb'], + model.variables['Sink(Wärme)|uptime'] >= ( - model.variables['Sink(Wärme)|on'].isel(time=slice(None, -1)) - - model.variables['Sink(Wärme)|on'].isel(time=slice(1, None)) + 
model.variables['Sink(Wärme)|status'].isel(time=slice(None, -1)) + - model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) ) * 2, ) def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_config): - """Test flow with minimum and maximum consecutive off hours.""" + """Test flow with minimum and maximum consecutive inactive hours.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - consecutive_off_hours_min=4, # Must stay off for at least 4 hours when shut down - consecutive_off_hours_max=12, # Can't be off for more than 12 consecutive hours + status_parameters=fx.StatusParameters( + min_downtime=4, # Must stay inactive for at least 4 hours when shut down + max_downtime=12, # Can't be inactive for more than 12 consecutive hours ), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) - assert {'Sink(Wärme)|consecutive_off_hours', 'Sink(Wärme)|off'}.issubset(set(flow.submodel.variables)) + assert {'Sink(Wärme)|downtime', 'Sink(Wärme)|inactive'}.issubset(set(flow.submodel.variables)) assert_sets_equal( { - 'Sink(Wärme)|consecutive_off_hours|ub', - 'Sink(Wärme)|consecutive_off_hours|forward', - 'Sink(Wärme)|consecutive_off_hours|backward', - 'Sink(Wärme)|consecutive_off_hours|initial', - 'Sink(Wärme)|consecutive_off_hours|lb', + 'Sink(Wärme)|downtime|ub', + 'Sink(Wärme)|downtime|forward', + 'Sink(Wärme)|downtime|backward', + 'Sink(Wärme)|downtime|initial', + 'Sink(Wärme)|downtime|lb', } & set(flow.submodel.constraints), { - 'Sink(Wärme)|consecutive_off_hours|ub', - 'Sink(Wärme)|consecutive_off_hours|forward', - 'Sink(Wärme)|consecutive_off_hours|backward', - 'Sink(Wärme)|consecutive_off_hours|initial', - 'Sink(Wärme)|consecutive_off_hours|lb', + 'Sink(Wärme)|downtime|ub', + 'Sink(Wärme)|downtime|forward', + 'Sink(Wärme)|downtime|backward', + 'Sink(Wärme)|downtime|initial', + 
'Sink(Wärme)|downtime|lb', }, - msg='Missing consecutive off hours constraints', + msg='Missing consecutive inactive hours constraints', ) assert_var_equal( - model.variables['Sink(Wärme)|consecutive_off_hours'], + model.variables['Sink(Wärme)|downtime'], model.add_variables(lower=0, upper=12, coords=model.get_coords()), ) - mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 1 # previously off for 1h + mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 1 # previously inactive for 1h assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|ub'], - model.variables['Sink(Wärme)|consecutive_off_hours'] <= model.variables['Sink(Wärme)|off'] * mega, + model.constraints['Sink(Wärme)|downtime|ub'], + model.variables['Sink(Wärme)|downtime'] <= model.variables['Sink(Wärme)|inactive'] * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|forward'], - model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(1, None)) - <= model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|downtime|forward'], + model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None)) + <= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)), ) # eq: duration(t) >= duration(t - 1) + dt(t) + (On(t) - 1) * BIG assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|backward'], - model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(1, None)) - >= model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|downtime|backward'], + model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None)) + >= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)) - + (model.variables['Sink(Wärme)|off'].isel(time=slice(1, None)) - 1) * mega, + 
+ (model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) - 1) * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|initial'], - model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=0) - == model.variables['Sink(Wärme)|off'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 1)), + model.constraints['Sink(Wärme)|downtime|initial'], + model.variables['Sink(Wärme)|downtime'].isel(time=0) + == model.variables['Sink(Wärme)|inactive'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 1)), ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|lb'], - model.variables['Sink(Wärme)|consecutive_off_hours'] + model.constraints['Sink(Wärme)|downtime|lb'], + model.variables['Sink(Wärme)|downtime'] >= ( - model.variables['Sink(Wärme)|off'].isel(time=slice(None, -1)) - - model.variables['Sink(Wärme)|off'].isel(time=slice(1, None)) + model.variables['Sink(Wärme)|inactive'].isel(time=slice(None, -1)) + - model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) ) * 4, ) def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, coords_config): - """Test flow with minimum and maximum consecutive off hours.""" + """Test flow with minimum and maximum consecutive inactive hours.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - consecutive_off_hours_min=4, # Must stay off for at least 4 hours when shut down - consecutive_off_hours_max=12, # Can't be off for more than 12 consecutive hours + status_parameters=fx.StatusParameters( + min_downtime=4, # Must stay inactive for at least 4 hours when shut down + max_downtime=12, # Can't be inactive for more than 12 consecutive hours ), - previous_flow_rate=np.array([10, 20, 30, 0, 20, 0, 0]), # Previously off for 2 steps + previous_flow_rate=np.array([10, 20, 30, 0, 20, 0, 0]), # Previously inactive for 2 steps ) 
flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) - assert {'Sink(Wärme)|consecutive_off_hours', 'Sink(Wärme)|off'}.issubset(set(flow.submodel.variables)) + assert {'Sink(Wärme)|downtime', 'Sink(Wärme)|inactive'}.issubset(set(flow.submodel.variables)) assert_sets_equal( { - 'Sink(Wärme)|consecutive_off_hours|ub', - 'Sink(Wärme)|consecutive_off_hours|forward', - 'Sink(Wärme)|consecutive_off_hours|backward', - 'Sink(Wärme)|consecutive_off_hours|initial', - 'Sink(Wärme)|consecutive_off_hours|lb', + 'Sink(Wärme)|downtime|ub', + 'Sink(Wärme)|downtime|forward', + 'Sink(Wärme)|downtime|backward', + 'Sink(Wärme)|downtime|initial', + 'Sink(Wärme)|downtime|lb', } & set(flow.submodel.constraints), { - 'Sink(Wärme)|consecutive_off_hours|ub', - 'Sink(Wärme)|consecutive_off_hours|forward', - 'Sink(Wärme)|consecutive_off_hours|backward', - 'Sink(Wärme)|consecutive_off_hours|initial', - 'Sink(Wärme)|consecutive_off_hours|lb', + 'Sink(Wärme)|downtime|ub', + 'Sink(Wärme)|downtime|forward', + 'Sink(Wärme)|downtime|backward', + 'Sink(Wärme)|downtime|initial', + 'Sink(Wärme)|downtime|lb', }, - msg='Missing consecutive off hours constraints for previous states', + msg='Missing consecutive inactive hours constraints for previous states', ) assert_var_equal( - model.variables['Sink(Wärme)|consecutive_off_hours'], + model.variables['Sink(Wärme)|downtime'], model.add_variables(lower=0, upper=12, coords=model.get_coords()), ) mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 2 assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|ub'], - model.variables['Sink(Wärme)|consecutive_off_hours'] <= model.variables['Sink(Wärme)|off'] * mega, + model.constraints['Sink(Wärme)|downtime|ub'], + model.variables['Sink(Wärme)|downtime'] <= model.variables['Sink(Wärme)|inactive'] * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|forward'], - 
model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(1, None)) - <= model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|downtime|forward'], + model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None)) + <= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)), ) # eq: duration(t) >= duration(t - 1) + dt(t) + (On(t) - 1) * BIG assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|backward'], - model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(1, None)) - >= model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=slice(None, -1)) + model.constraints['Sink(Wärme)|downtime|backward'], + model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None)) + >= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1)) + model.hours_per_step.isel(time=slice(None, -1)) - + (model.variables['Sink(Wärme)|off'].isel(time=slice(1, None)) - 1) * mega, + + (model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) - 1) * mega, ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|initial'], - model.variables['Sink(Wärme)|consecutive_off_hours'].isel(time=0) - == model.variables['Sink(Wärme)|off'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 2)), + model.constraints['Sink(Wärme)|downtime|initial'], + model.variables['Sink(Wärme)|downtime'].isel(time=0) + == model.variables['Sink(Wärme)|inactive'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 2)), ) assert_conequal( - model.constraints['Sink(Wärme)|consecutive_off_hours|lb'], - model.variables['Sink(Wärme)|consecutive_off_hours'] + model.constraints['Sink(Wärme)|downtime|lb'], + model.variables['Sink(Wärme)|downtime'] >= ( - model.variables['Sink(Wärme)|off'].isel(time=slice(None, -1)) - - model.variables['Sink(Wärme)|off'].isel(time=slice(1, None)) + 
model.variables['Sink(Wärme)|inactive'].isel(time=slice(None, -1)) + - model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) ) * 4, ) @@ -975,9 +979,9 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - switch_on_max=5, # Maximum 5 startups - effects_per_switch_on={'costs': 100}, # 100 EUR startup cost + status_parameters=fx.StatusParameters( + startup_limit=5, # Maximum 5 startups + effects_per_startup={'costs': 100}, # 100 EUR startup cost ), ) @@ -985,7 +989,7 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con model = create_linopy_model(flow_system) # Check that variables exist - assert {'Sink(Wärme)|switch|on', 'Sink(Wärme)|switch|off', 'Sink(Wärme)|switch|count'}.issubset( + assert {'Sink(Wärme)|startup', 'Sink(Wärme)|shutdown', 'Sink(Wärme)|startup_count'}.issubset( set(flow.submodel.variables) ) @@ -995,29 +999,29 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con 'Sink(Wärme)|switch|transition', 'Sink(Wärme)|switch|initial', 'Sink(Wärme)|switch|mutex', - 'Sink(Wärme)|switch|count', + 'Sink(Wärme)|startup_count', } & set(flow.submodel.constraints), { 'Sink(Wärme)|switch|transition', 'Sink(Wärme)|switch|initial', 'Sink(Wärme)|switch|mutex', - 'Sink(Wärme)|switch|count', + 'Sink(Wärme)|startup_count', }, msg='Missing switch constraints', ) - # Check switch_on_nr variable bounds + # Check startup_count variable bounds assert_var_equal( - flow.submodel.variables['Sink(Wärme)|switch|count'], + flow.submodel.variables['Sink(Wärme)|startup_count'], model.add_variables(lower=0, upper=5, coords=model.get_coords(['period', 'scenario'])), ) - # Verify switch_on_nr constraint (limits number of startups) + # Verify startup_count constraint (limits number of startups) assert_conequal( - model.constraints['Sink(Wärme)|switch|count'], - flow.submodel.variables['Sink(Wärme)|switch|count'] 
- == flow.submodel.variables['Sink(Wärme)|switch|on'].sum('time'), + model.constraints['Sink(Wärme)|startup_count'], + flow.submodel.variables['Sink(Wärme)|startup_count'] + == flow.submodel.variables['Sink(Wärme)|startup'].sum('time'), ) # Check that startup cost effect constraint exists @@ -1026,20 +1030,20 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con # Verify the startup cost effect constraint assert_conequal( model.constraints['Sink(Wärme)->costs(temporal)'], - model.variables['Sink(Wärme)->costs(temporal)'] == flow.submodel.variables['Sink(Wärme)|switch|on'] * 100, + model.variables['Sink(Wärme)->costs(temporal)'] == flow.submodel.variables['Sink(Wärme)|startup'] * 100, ) def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config): - """Test flow with limits on total on hours.""" + """Test flow with limits on total active hours.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( 'Wärme', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters( - on_hours_min=20, # Minimum 20 hours of operation - on_hours_max=100, # Maximum 100 hours of operation + status_parameters=fx.StatusParameters( + active_hours_min=20, # Minimum 20 hours of operation + active_hours_max=100, # Maximum 100 hours of operation ), ) @@ -1047,22 +1051,22 @@ def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config): model = create_linopy_model(flow_system) # Check that variables exist - assert {'Sink(Wärme)|on', 'Sink(Wärme)|on_hours_total'}.issubset(set(flow.submodel.variables)) + assert {'Sink(Wärme)|status', 'Sink(Wärme)|active_hours'}.issubset(set(flow.submodel.variables)) # Check that constraints exist - assert 'Sink(Wärme)|on_hours_total' in model.constraints + assert 'Sink(Wärme)|active_hours' in model.constraints - # Check on_hours_total variable bounds + # Check active_hours variable bounds assert_var_equal( - 
flow.submodel.variables['Sink(Wärme)|on_hours_total'], + flow.submodel.variables['Sink(Wärme)|active_hours'], model.add_variables(lower=20, upper=100, coords=model.get_coords(['period', 'scenario'])), ) - # Check on_hours_total constraint + # Check active_hours constraint assert_conequal( - model.constraints['Sink(Wärme)|on_hours_total'], - flow.submodel.variables['Sink(Wärme)|on_hours_total'] - == (flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step).sum('time'), + model.constraints['Sink(Wärme)|active_hours'], + flow.submodel.variables['Sink(Wärme)|active_hours'] + == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'), ) @@ -1077,7 +1081,7 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=False), relative_minimum=0.2, relative_maximum=0.8, - on_off_parameters=fx.OnOffParameters(), + status_parameters=fx.StatusParameters(), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) @@ -1089,8 +1093,8 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|invested', 'Sink(Wärme)|size', - 'Sink(Wärme)|on', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|status', + 'Sink(Wärme)|active_hours', }, msg='Incorrect variables', ) @@ -1099,7 +1103,7 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c set(flow.submodel.constraints), { 'Sink(Wärme)|total_flow_hours', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|active_hours', 'Sink(Wärme)|flow_rate|lb1', 'Sink(Wärme)|flow_rate|ub1', 'Sink(Wärme)|size|lb', @@ -1120,14 +1124,16 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c ), ) - # OnOff + # Status assert_var_equal( - flow.submodel.on_off.on, + flow.submodel.status.status, model.add_variables(binary=True, coords=model.get_coords()), ) + # Upper bound is 
total hours when active_hours_max is not specified + total_hours = model.hours_per_step.sum('time') assert_var_equal( - model.variables['Sink(Wärme)|on_hours_total'], - model.add_variables(lower=0, coords=model.get_coords(['period', 'scenario'])), + model.variables['Sink(Wärme)|active_hours'], + model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])), ) assert_conequal( model.constraints['Sink(Wärme)|size|lb'], @@ -1139,16 +1145,18 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c ) assert_conequal( model.constraints['Sink(Wärme)|flow_rate|lb1'], - flow.submodel.variables['Sink(Wärme)|on'] * 0.2 * 20 <= flow.submodel.variables['Sink(Wärme)|flow_rate'], + flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 20 + <= flow.submodel.variables['Sink(Wärme)|flow_rate'], ) assert_conequal( model.constraints['Sink(Wärme)|flow_rate|ub1'], - flow.submodel.variables['Sink(Wärme)|on'] * 0.8 * 200 >= flow.submodel.variables['Sink(Wärme)|flow_rate'], + flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 200 + >= flow.submodel.variables['Sink(Wärme)|flow_rate'], ) assert_conequal( - model.constraints['Sink(Wärme)|on_hours_total'], - flow.submodel.variables['Sink(Wärme)|on_hours_total'] - == (flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step).sum('time'), + model.constraints['Sink(Wärme)|active_hours'], + flow.submodel.variables['Sink(Wärme)|active_hours'] + == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'), ) # Investment @@ -1161,7 +1169,7 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c assert_conequal( model.constraints['Sink(Wärme)|flow_rate|lb2'], flow.submodel.variables['Sink(Wärme)|flow_rate'] - >= flow.submodel.variables['Sink(Wärme)|on'] * mega + >= flow.submodel.variables['Sink(Wärme)|status'] * mega + flow.submodel.variables['Sink(Wärme)|size'] * 0.2 - mega, ) @@ -1178,7 +1186,7 @@ def 
test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coor size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=True), relative_minimum=0.2, relative_maximum=0.8, - on_off_parameters=fx.OnOffParameters(), + status_parameters=fx.StatusParameters(), ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow])) model = create_linopy_model(flow_system) @@ -1189,8 +1197,8 @@ def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coor 'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|size', - 'Sink(Wärme)|on', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|status', + 'Sink(Wärme)|active_hours', }, msg='Incorrect variables', ) @@ -1199,7 +1207,7 @@ def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coor set(flow.submodel.constraints), { 'Sink(Wärme)|total_flow_hours', - 'Sink(Wärme)|on_hours_total', + 'Sink(Wärme)|active_hours', 'Sink(Wärme)|flow_rate|lb1', 'Sink(Wärme)|flow_rate|ub1', 'Sink(Wärme)|flow_rate|lb2', @@ -1218,27 +1226,31 @@ def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coor ), ) - # OnOff + # Status assert_var_equal( - flow.submodel.on_off.on, + flow.submodel.status.status, model.add_variables(binary=True, coords=model.get_coords()), ) + # Upper bound is total hours when active_hours_max is not specified + total_hours = model.hours_per_step.sum('time') assert_var_equal( - model.variables['Sink(Wärme)|on_hours_total'], - model.add_variables(lower=0, coords=model.get_coords(['period', 'scenario'])), + model.variables['Sink(Wärme)|active_hours'], + model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])), ) assert_conequal( model.constraints['Sink(Wärme)|flow_rate|lb1'], - flow.submodel.variables['Sink(Wärme)|on'] * 0.2 * 20 <= flow.submodel.variables['Sink(Wärme)|flow_rate'], + flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 20 + <= flow.submodel.variables['Sink(Wärme)|flow_rate'], ) 
assert_conequal( model.constraints['Sink(Wärme)|flow_rate|ub1'], - flow.submodel.variables['Sink(Wärme)|on'] * 0.8 * 200 >= flow.submodel.variables['Sink(Wärme)|flow_rate'], + flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 200 + >= flow.submodel.variables['Sink(Wärme)|flow_rate'], ) assert_conequal( - model.constraints['Sink(Wärme)|on_hours_total'], - flow.submodel.variables['Sink(Wärme)|on_hours_total'] - == (flow.submodel.variables['Sink(Wärme)|on'] * model.hours_per_step).sum('time'), + model.constraints['Sink(Wärme)|active_hours'], + flow.submodel.variables['Sink(Wärme)|active_hours'] + == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'), ) # Investment @@ -1251,7 +1263,7 @@ def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coor assert_conequal( model.constraints['Sink(Wärme)|flow_rate|lb2'], flow.submodel.variables['Sink(Wärme)|flow_rate'] - >= flow.submodel.variables['Sink(Wärme)|on'] * mega + >= flow.submodel.variables['Sink(Wärme)|status'] * mega + flow.submodel.variables['Sink(Wärme)|size'] * 0.2 - mega, ) diff --git a/tests/test_functional.py b/tests/test_functional.py index ae01a44f2..4b5c6c686 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -3,7 +3,7 @@ This module defines a set of unit tests for testing the functionality of the `flixopt` framework. The tests focus on verifying the correct behavior of flow systems, including component modeling, -investment optimization, and operational constraints like on-off behavior. +investment optimization, and operational constraints like status behavior. ### Approach: 1. **Setup**: Each test initializes a flow system with a set of predefined elements and parameters. @@ -11,10 +11,10 @@ 3. **Solution**: The models are solved using the `solve_and_load` method, which performs modeling, solves the optimization problem, and loads the results. 4. 
**Validation**: Results are validated using assertions, primarily `assert_allclose`, to ensure model outputs match expected values with a specified tolerance. -Classes group related test cases by their functional focus: -- Minimal modeling setup (`TestMinimal`) -- Investment behavior (`TestInvestment`) -- On-off operational constraints (`TestOnOff`). +Tests group related cases by their functional focus: +- Minimal modeling setup (`TestMinimal` class) +- Investment behavior (`TestInvestment` class) +- Status operational constraints (functions: `test_startup_shutdown`, `test_consecutive_uptime_downtime`, etc.) """ import numpy as np @@ -338,7 +338,7 @@ def test_on(solver_fixture, time_steps_fixture): 'Boiler', thermal_efficiency=0.5, fuel_flow=fx.Flow('Q_fu', bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100, on_off_parameters=fx.OnOffParameters()), + thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100, status_parameters=fx.StatusParameters()), ) ) @@ -354,7 +354,7 @@ def test_on(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [0, 1, 1, 0, 1], rtol=1e-5, atol=1e-10, @@ -381,7 +381,7 @@ def test_off(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(consecutive_off_hours_max=100), + status_parameters=fx.StatusParameters(max_downtime=100), ), ) ) @@ -398,15 +398,15 @@ def test_off(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [0, 1, 1, 0, 1], rtol=1e-5, atol=1e-10, err_msg='"Boiler__Q_th__on" does not have the right value', ) assert_allclose( - boiler.thermal_flow.submodel.on_off.off.solution.values, - 1 - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.inactive.solution.values, + 1 - 
boiler.thermal_flow.submodel.status.status.solution.values, rtol=1e-5, atol=1e-10, err_msg='"Boiler__Q_th__off" does not have the right value', @@ -420,8 +420,8 @@ def test_off(solver_fixture, time_steps_fixture): ) -def test_switch_on_off(solver_fixture, time_steps_fixture): - """Tests if the Switch On/Off Variable is correctly created and calculated in a Flow""" +def test_startup_shutdown(solver_fixture, time_steps_fixture): + """Tests if the startup/shutdown Variable is correctly created and calculated in a Flow""" flow_system = flow_system_base(time_steps_fixture) flow_system.add_elements( fx.linear_converters.Boiler( @@ -432,7 +432,7 @@ def test_switch_on_off(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(force_switch_on=True), + status_parameters=fx.StatusParameters(force_startup_tracking=True), ), ) ) @@ -449,21 +449,21 @@ def test_switch_on_off(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [0, 1, 1, 0, 1], rtol=1e-5, atol=1e-10, err_msg='"Boiler__Q_th__on" does not have the right value', ) assert_allclose( - boiler.thermal_flow.submodel.on_off.switch_on.solution.values, + boiler.thermal_flow.submodel.status.startup.solution.values, [0, 1, 0, 0, 1], rtol=1e-5, atol=1e-10, err_msg='"Boiler__Q_th__switch_on" does not have the right value', ) assert_allclose( - boiler.thermal_flow.submodel.on_off.switch_off.solution.values, + boiler.thermal_flow.submodel.status.shutdown.solution.values, [0, 0, 0, 1, 0], rtol=1e-5, atol=1e-10, @@ -490,7 +490,7 @@ def test_on_total_max(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(on_hours_max=1), + status_parameters=fx.StatusParameters(active_hours_max=1), ), ), fx.linear_converters.Boiler( @@ -513,7 +513,7 @@ def test_on_total_max(solver_fixture, time_steps_fixture): ) 
assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [0, 0, 1, 0, 0], rtol=1e-5, atol=1e-10, @@ -540,7 +540,7 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(on_hours_max=2), + status_parameters=fx.StatusParameters(active_hours_max=2), ), ), fx.linear_converters.Boiler( @@ -551,7 +551,7 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(on_hours_min=3), + status_parameters=fx.StatusParameters(active_hours_min=3), ), ), ) @@ -572,7 +572,7 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [0, 0, 1, 0, 1], rtol=1e-5, atol=1e-10, @@ -587,7 +587,7 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): ) assert_allclose( - sum(boiler_backup.thermal_flow.submodel.on_off.on.solution.values), + sum(boiler_backup.thermal_flow.submodel.status.status.solution.values), 3, rtol=1e-5, atol=1e-10, @@ -602,8 +602,8 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): ) -def test_consecutive_on_off(solver_fixture, time_steps_fixture): - """Tests if the consecutive on/off hours are correctly created and calculated in a Flow""" +def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture): + """Tests if the consecutive uptime/downtime are correctly created and calculated in a Flow""" flow_system = flow_system_base(time_steps_fixture) flow_system.add_elements( fx.linear_converters.Boiler( @@ -614,7 +614,7 @@ def test_consecutive_on_off(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=100, - on_off_parameters=fx.OnOffParameters(consecutive_on_hours_max=2, consecutive_on_hours_min=2), + 
status_parameters=fx.StatusParameters(max_uptime=2, min_uptime=2), ), ), fx.linear_converters.Boiler( @@ -640,7 +640,7 @@ def test_consecutive_on_off(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler.thermal_flow.submodel.on_off.on.solution.values, + boiler.thermal_flow.submodel.status.status.solution.values, [1, 1, 0, 1, 1], rtol=1e-5, atol=1e-10, @@ -682,7 +682,7 @@ def test_consecutive_off(solver_fixture, time_steps_fixture): bus='Fernwärme', size=100, previous_flow_rate=np.array([20]), # Otherwise its Off before the start - on_off_parameters=fx.OnOffParameters(consecutive_off_hours_max=2, consecutive_off_hours_min=2), + status_parameters=fx.StatusParameters(max_downtime=2, min_downtime=2), ), ), ) @@ -703,14 +703,14 @@ def test_consecutive_off(solver_fixture, time_steps_fixture): ) assert_allclose( - boiler_backup.thermal_flow.submodel.on_off.on.solution.values, + boiler_backup.thermal_flow.submodel.status.status.solution.values, [0, 0, 1, 0, 0], rtol=1e-5, atol=1e-10, err_msg='"Boiler_backup__Q_th__on" does not have the right value', ) assert_allclose( - boiler_backup.thermal_flow.submodel.on_off.off.solution.values, + boiler_backup.thermal_flow.submodel.status.inactive.solution.values, [1, 1, 0, 1, 1], rtol=1e-5, atol=1e-10, diff --git a/tests/test_linear_converter.py b/tests/test_linear_converter.py index 02aa792f3..57b911d64 100644 --- a/tests/test_linear_converter.py +++ b/tests/test_linear_converter.py @@ -134,24 +134,26 @@ def test_linear_converter_multiple_factors(self, basic_flow_system_linopy_coords input_flow1.submodel.flow_rate * 0.2 == output_flow2.submodel.flow_rate * 0.3, ) - def test_linear_converter_with_on_off(self, basic_flow_system_linopy_coords, coords_config): - """Test a LinearConverter with OnOffParameters.""" + def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coords_config): + """Test a LinearConverter with StatusParameters.""" flow_system, coords_config = basic_flow_system_linopy_coords, 
coords_config # Create input and output flows input_flow = fx.Flow('input', bus='input_bus', size=100) output_flow = fx.Flow('output', bus='output_bus', size=100) - # Create OnOffParameters - on_off_params = fx.OnOffParameters(on_hours_min=10, on_hours_max=40, effects_per_running_hour={'costs': 5}) + # Create StatusParameters + status_params = fx.StatusParameters( + active_hours_min=10, active_hours_max=40, effects_per_active_hour={'costs': 5} + ) - # Create a linear converter with OnOffParameters + # Create a linear converter with StatusParameters converter = fx.LinearConverter( label='Converter', inputs=[input_flow], outputs=[output_flow], conversion_factors=[{input_flow.label: 0.8, output_flow.label: 1.0}], - on_off_parameters=on_off_params, + status_parameters=status_params, ) # Add to flow system @@ -164,15 +166,15 @@ def test_linear_converter_with_on_off(self, basic_flow_system_linopy_coords, coo # Create model model = create_linopy_model(flow_system) - # Verify OnOff variables and constraints - assert 'Converter|on' in model.variables - assert 'Converter|on_hours_total' in model.variables + # Verify Status variables and constraints + assert 'Converter|status' in model.variables + assert 'Converter|active_hours' in model.variables - # Check on_hours_total constraint + # Check active_hours constraint assert_conequal( - model.constraints['Converter|on_hours_total'], - model.variables['Converter|on_hours_total'] - == (model.variables['Converter|on'] * model.hours_per_step).sum('time'), + model.constraints['Converter|active_hours'], + model.variables['Converter|active_hours'] + == (model.variables['Converter|status'] * model.hours_per_step).sum('time'), ) # Check conversion constraint @@ -181,11 +183,12 @@ def test_linear_converter_with_on_off(self, basic_flow_system_linopy_coords, coo input_flow.submodel.flow_rate * 0.8 == output_flow.submodel.flow_rate * 1.0, ) - # Check on_off effects + # Check status effects assert 'Converter->costs(temporal)' in 
model.constraints assert_conequal( model.constraints['Converter->costs(temporal)'], - model.variables['Converter->costs(temporal)'] == model.variables['Converter|on'] * model.hours_per_step * 5, + model.variables['Converter->costs(temporal)'] + == model.variables['Converter|status'] * model.hours_per_step * 5, ) def test_linear_converter_multidimensional(self, basic_flow_system_linopy_coords, coords_config): @@ -368,15 +371,15 @@ def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_conf assert 'Converter|Converter(input)|flow_rate|single_segment' in model.constraints # The constraint should enforce that the sum of inside_piece variables is limited - # If there's no on_off parameter, the right-hand side should be 1 + # If there's no status parameter, the right-hand side should be 1 assert_conequal( model.constraints['Converter|Converter(input)|flow_rate|single_segment'], sum([model.variables[f'Converter|Piece_{i}|inside_piece'] for i in range(len(piecewise_model.pieces))]) <= 1, ) - def test_piecewise_conversion_with_onoff(self, basic_flow_system_linopy_coords, coords_config): - """Test a LinearConverter with PiecewiseConversion and OnOffParameters.""" + def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords, coords_config): + """Test a LinearConverter with PiecewiseConversion and StatusParameters.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create input and output flows @@ -393,16 +396,18 @@ def test_piecewise_conversion_with_onoff(self, basic_flow_system_linopy_coords, {input_flow.label: fx.Piecewise(input_pieces), output_flow.label: fx.Piecewise(output_pieces)} ) - # Create OnOffParameters - on_off_params = fx.OnOffParameters(on_hours_min=10, on_hours_max=40, effects_per_running_hour={'costs': 5}) + # Create StatusParameters + status_params = fx.StatusParameters( + active_hours_min=10, active_hours_max=40, effects_per_active_hour={'costs': 5} + ) - # Create a linear converter 
with piecewise conversion and on/off parameters + # Create a linear converter with piecewise conversion and status parameters converter = fx.LinearConverter( label='Converter', inputs=[input_flow], outputs=[output_flow], piecewise_conversion=piecewise_conversion, - on_off_parameters=on_off_params, + status_parameters=status_params, ) # Add to flow system @@ -424,9 +429,9 @@ def test_piecewise_conversion_with_onoff(self, basic_flow_system_linopy_coords, # Check that we have the expected pieces (2 in this case) assert len(piecewise_model.pieces) == 2 - # Verify that the on variable was used as the zero_point for the piecewise model - # When using OnOffParameters, the zero_point should be the on variable - assert 'Converter|on' in model.variables + # Verify that the status variable was used as the zero_point for the piecewise model + # When using StatusParameters, the zero_point should be the status variable + assert 'Converter|status' in model.variables assert piecewise_model.zero_point is not None # Should be a variable # Verify that variables were created for each piece @@ -473,21 +478,22 @@ def test_piecewise_conversion_with_onoff(self, basic_flow_system_linopy_coords, assert_conequal( model.constraints['Converter|Converter(input)|flow_rate|single_segment'], sum([model.variables[f'Converter|Piece_{i}|inside_piece'] for i in range(len(piecewise_model.pieces))]) - <= model.variables['Converter|on'], + <= model.variables['Converter|status'], ) - # Also check that the OnOff model is working correctly - assert 'Converter|on_hours_total' in model.constraints + # Also check that the Status model is working correctly + assert 'Converter|active_hours' in model.constraints assert_conequal( - model.constraints['Converter|on_hours_total'], - model['Converter|on_hours_total'] == (model['Converter|on'] * model.hours_per_step).sum('time'), + model.constraints['Converter|active_hours'], + model['Converter|active_hours'] == (model['Converter|status'] * 
model.hours_per_step).sum('time'), ) # Verify that the costs effect is applied assert 'Converter->costs(temporal)' in model.constraints assert_conequal( model.constraints['Converter->costs(temporal)'], - model.variables['Converter->costs(temporal)'] == model.variables['Converter|on'] * model.hours_per_step * 5, + model.variables['Converter->costs(temporal)'] + == model.variables['Converter|status'] * model.hours_per_step * 5, ) diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index bd402cb8c..c952777b2 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -143,7 +143,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: boiler = fx.linear_converters.Boiler( 'Kessel', thermal_efficiency=0.5, - on_off_parameters=fx.OnOffParameters(effects_per_running_hour={'costs': 0, 'CO2': 1000}), + status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}), thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', @@ -158,14 +158,14 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: mandatory=True, effects_of_investment_per_size={'costs': 10, 'PE': 2}, ), - on_off_parameters=fx.OnOffParameters( - on_hours_min=0, - on_hours_max=1000, - consecutive_on_hours_max=10, - consecutive_on_hours_min=1, - consecutive_off_hours_max=10, - effects_per_switch_on=0.01, - switch_on_max=1000, + status_parameters=fx.StatusParameters( + active_hours_min=0, + active_hours_max=1000, + max_uptime=10, + min_uptime=1, + max_downtime=10, + effects_per_startup=0.01, + startup_limit=1000, ), flow_hours_max=1e6, ), @@ -231,7 +231,7 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) -> 'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]), } ), - on_off_parameters=fx.OnOffParameters(effects_per_switch_on=0.01), + status_parameters=fx.StatusParameters(effects_per_startup=0.01), ) ) diff --git a/tests/test_storage.py b/tests/test_storage.py index 6220ee08a..a5d2c7a19 100644 --- a/tests/test_storage.py +++ 
b/tests/test_storage.py @@ -408,8 +408,8 @@ def test_simultaneous_charge_discharge(self, basic_flow_system_linopy_coords, co # Binary variables should exist when preventing simultaneous operation if prevent_simultaneous: binary_vars = { - 'SimultaneousStorage(Q_th_in)|on', - 'SimultaneousStorage(Q_th_out)|on', + 'SimultaneousStorage(Q_th_in)|status', + 'SimultaneousStorage(Q_th_out)|status', } for var_name in binary_vars: assert var_name in model.variables, f'Missing binary variable: {var_name}' @@ -420,7 +420,8 @@ def test_simultaneous_charge_discharge(self, basic_flow_system_linopy_coords, co assert_conequal( model.constraints['SimultaneousStorage|prevent_simultaneous_use'], - model.variables['SimultaneousStorage(Q_th_in)|on'] + model.variables['SimultaneousStorage(Q_th_out)|on'] + model.variables['SimultaneousStorage(Q_th_in)|status'] + + model.variables['SimultaneousStorage(Q_th_out)|status'] <= 1, ) From 60a3e65aa8a09f3c6536b7f99fd5629be519b2d5 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 04:21:47 +0100 Subject: [PATCH 08/49] Removed all deprecated items --- CHANGELOG.md | 40 +++++- flixopt/__init__.py | 16 +-- flixopt/calculation.py | 177 ------------------------- flixopt/clustering.py | 39 ------ flixopt/config.py | 24 +--- flixopt/core.py | 35 +---- flixopt/effects.py | 23 ++-- flixopt/elements.py | 18 +-- flixopt/flow_system.py | 77 +---------- flixopt/optimization.py | 87 +----------- flixopt/results.py | 270 +------------------------------------- tests/test_config.py | 7 - tests/test_integration.py | 6 +- tests/test_scenarios.py | 14 +- 14 files changed, 75 insertions(+), 758 deletions(-) delete mode 100644 flixopt/calculation.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 3bc226e40..6d87e290f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -51,7 +51,7 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ## [Unreleased] - ????-??-?? 
-**Summary**: Renamed OnOff terminology to Status terminology for better alignment with PyPSA and unit commitment standards. +**Summary**: Renamed OnOff terminology to Status terminology for better alignment with PyPSA and unit commitment standards. **All deprecated items from v4.x have been removed.** ### ✨ Added @@ -127,6 +127,44 @@ A partial backwards compatibility wrapper would be misleading, so we opted for a ### 🔥 Removed +**Modules removed:** +- `calculation.py` module - Use `optimization.py` instead + +**Classes removed:** +- `Calculation`, `FullCalculation` → Use `Optimization` +- `AggregatedCalculation` → Use `ClusteredOptimization` +- `SegmentedCalculation` → Use `SegmentedOptimization` +- `Aggregation` → Use `Clustering` +- `AggregationParameters` → Use `ClusteringParameters` +- `AggregationModel` → Use `ClusteringModel` +- `CalculationResults` → Use `Results` +- `SegmentedCalculationResults` → Use `SegmentedResults` + +**Functions removed:** +- `change_logging_level()` → Use `CONFIG.Logging.enable_console()` + +**Methods removed:** +- `Optimization._perform_aggregation()` → Use `_perform_clustering()` +- `Optimization.calculate_aggregation_weights()` → Use `calculate_clustering_weights()` + +**Parameters removed:** +- `Optimization.active_timesteps` → Use `flow_system.sel(time=...)` or `flow_system.isel(time=...)` +- `TimeSeriesData.from_dataarray()`: `aggregation_group` → Use `clustering_group` +- `TimeSeriesData.from_dataarray()`: `aggregation_weight` → Use `clustering_weight` +- `FlowSystem.weights` → Use `scenario_weights` +- `Results.__init__()`: `flow_system` → Use `flow_system_data` +- `Results` plotting methods: `indexer` → Use `select` +- `Results.plot_heatmap()`: `heatmap_timeframes`, `heatmap_timesteps_per_frame` → Use `reshape_time` +- `Results.plot_heatmap()`: `color_map` → Use `colors` + +**Properties removed:** +- `FlowSystem.all_elements` → Use dict-like interface (`flow_system['label']`, `.keys()`, `.values()`, `.items()`) +- 
`FlowSystem.weights` → Use `scenario_weights` + +**Features removed:** +- Passing `Bus` objects directly to `Flow` → Pass bus label string instead and add Bus to FlowSystem +- Using `Effect` objects in `EffectValues` → Use effect label strings instead + **Deprecated parameters removed** (all were deprecated in v4.0.0 or earlier): **TimeSeriesData:** diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 0f8fc73e2..8874811b3 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -14,10 +14,7 @@ # Import commonly used classes and functions from . import linear_converters, plotting, results, solvers - -# Import old Calculation classes for backwards compatibility (deprecated) -from .calculation import AggregatedCalculation, FullCalculation, SegmentedCalculation -from .clustering import AggregationParameters, ClusteringParameters # AggregationParameters is deprecated +from .clustering import ClusteringParameters from .components import ( LinearConverter, Sink, @@ -26,20 +23,17 @@ Storage, Transmission, ) -from .config import CONFIG, change_logging_level +from .config import CONFIG from .core import TimeSeriesData from .effects import PENALTY_EFFECT_LABEL, Effect from .elements import Bus, Flow from .flow_system import FlowSystem from .interface import InvestParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects, StatusParameters - -# Import new Optimization classes from .optimization import ClusteredOptimization, Optimization, SegmentedOptimization __all__ = [ 'TimeSeriesData', 'CONFIG', - 'change_logging_level', 'Flow', 'Bus', 'Effect', @@ -51,14 +45,9 @@ 'LinearConverter', 'Transmission', 'FlowSystem', - # New Optimization classes (preferred) 'Optimization', 'ClusteredOptimization', 'SegmentedOptimization', - # Old Calculation classes (deprecated, for backwards compatibility) - 'FullCalculation', - 'AggregatedCalculation', - 'SegmentedCalculation', 'InvestParameters', 'StatusParameters', 'Piece', @@ -66,7 +55,6 @@ 'PiecewiseConversion', 
'PiecewiseEffects', 'ClusteringParameters', - 'AggregationParameters', # Deprecated, use ClusteringParameters 'plotting', 'results', 'linear_converters', diff --git a/flixopt/calculation.py b/flixopt/calculation.py deleted file mode 100644 index 1211c6763..000000000 --- a/flixopt/calculation.py +++ /dev/null @@ -1,177 +0,0 @@ -""" -This module provides backwards-compatible aliases for the renamed Optimization classes. - -DEPRECATED: This module is deprecated. Use the optimization module instead. -The following classes have been renamed: - - Calculation -> Optimization - - FullCalculation -> Optimization (now the standard, no "Full" prefix) - - AggregatedCalculation -> ClusteredOptimization - - SegmentedCalculation -> SegmentedOptimization - -Import from flixopt.optimization or use the new names from flixopt directly. -""" - -from __future__ import annotations - -import logging -import warnings -from typing import TYPE_CHECKING - -from .config import DEPRECATION_REMOVAL_VERSION -from .optimization import ( - ClusteredOptimization as _ClusteredOptimization, -) -from .optimization import ( - Optimization as _Optimization, -) -from .optimization import ( - SegmentedOptimization as _SegmentedOptimization, -) - -if TYPE_CHECKING: - import pathlib - from typing import Annotated - - import pandas as pd - - from .clustering import AggregationParameters - from .elements import Component - from .flow_system import FlowSystem - -logger = logging.getLogger('flixopt') - - -def _deprecation_warning(old_name: str, new_name: str): - """Issue a deprecation warning for renamed classes.""" - warnings.warn( - f'{old_name} is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. Use {new_name} instead.', - DeprecationWarning, - stacklevel=3, - ) - - -class Calculation(_Optimization): - """ - DEPRECATED: Use Optimization instead. 
- - class for defined way of solving a flow_system optimization - - Args: - name: name of calculation - flow_system: flow_system which should be calculated - folder: folder where results should be saved. If None, then the current working directory is used. - normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving. - active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. - """ - - def __init__( - self, - name: str, - flow_system: FlowSystem, - active_timesteps: Annotated[ - pd.DatetimeIndex | None, - 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', - ] = None, - folder: pathlib.Path | None = None, - normalize_weights: bool = True, - ): - _deprecation_warning('Calculation', 'Optimization') - super().__init__(name, flow_system, active_timesteps, folder, normalize_weights) - - -class FullCalculation(_Optimization): - """ - DEPRECATED: Use Optimization instead (the "Full" prefix has been removed). - - FullCalculation solves the complete optimization problem using all time steps. - - This is the most comprehensive calculation type that considers every time step - in the optimization, providing the most accurate but computationally intensive solution. - - Args: - name: name of calculation - flow_system: flow_system which should be calculated - folder: folder where results should be saved. If None, then the current working directory is used. - normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving. - active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. - """ - - def __init__( - self, - name: str, - flow_system: FlowSystem, - active_timesteps: Annotated[ - pd.DatetimeIndex | None, - 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) 
instead', - ] = None, - folder: pathlib.Path | None = None, - normalize_weights: bool = True, - ): - _deprecation_warning('FullCalculation', 'Optimization') - super().__init__(name, flow_system, active_timesteps, folder, normalize_weights) - - -class AggregatedCalculation(_ClusteredOptimization): - """ - DEPRECATED: Use ClusteredOptimization instead. - - AggregatedCalculation reduces computational complexity by clustering time series into typical periods. - - This calculation approach aggregates time series data using clustering techniques (tsam) to identify - representative time periods, significantly reducing computation time while maintaining solution accuracy. - - Args: - name: Name of the calculation - flow_system: FlowSystem to be optimized - aggregation_parameters: Parameters for aggregation. See AggregationParameters class documentation - components_to_clusterize: list of Components to perform aggregation on. If None, all components are aggregated. - This equalizes variables in the components according to the typical periods computed in the aggregation - active_timesteps: DatetimeIndex of timesteps to use for optimization. If None, all timesteps are used - folder: Folder where results should be saved. If None, current working directory is used - """ - - def __init__( - self, - name: str, - flow_system: FlowSystem, - aggregation_parameters: AggregationParameters, - components_to_clusterize: list[Component] | None = None, - active_timesteps: Annotated[ - pd.DatetimeIndex | None, - 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', - ] = None, - folder: pathlib.Path | None = None, - ): - _deprecation_warning('AggregatedCalculation', 'ClusteredOptimization') - super().__init__(name, flow_system, aggregation_parameters, components_to_clusterize, active_timesteps, folder) - - -class SegmentedCalculation(_SegmentedOptimization): - """ - DEPRECATED: Use SegmentedOptimization instead. 
- - Solve large optimization problems by dividing time horizon into (overlapping) segments. - - Args: - name: Unique identifier for the calculation, used in result files and logging. - flow_system: The FlowSystem to optimize, containing all components, flows, and buses. - timesteps_per_segment: Number of timesteps in each segment (excluding overlap). - overlap_timesteps: Number of additional timesteps added to each segment. - nr_of_previous_values: Number of previous timestep values to transfer between segments for initialization. - folder: Directory for saving results. Defaults to current working directory + 'results'. - """ - - def __init__( - self, - name: str, - flow_system: FlowSystem, - timesteps_per_segment: int, - overlap_timesteps: int, - nr_of_previous_values: int = 1, - folder: pathlib.Path | None = None, - ): - _deprecation_warning('SegmentedCalculation', 'SegmentedOptimization') - super().__init__(name, flow_system, timesteps_per_segment, overlap_timesteps, nr_of_previous_values, folder) - - -__all__ = ['Calculation', 'FullCalculation', 'AggregatedCalculation', 'SegmentedCalculation'] diff --git a/flixopt/clustering.py b/flixopt/clustering.py index 2fbd65318..1c6f7511b 100644 --- a/flixopt/clustering.py +++ b/flixopt/clustering.py @@ -9,13 +9,10 @@ import logging import pathlib import timeit -import warnings as _warnings from typing import TYPE_CHECKING import numpy as np -from .config import DEPRECATION_REMOVAL_VERSION - try: import tsam.timeseriesaggregation as tsam @@ -401,39 +398,3 @@ def _equate_indices(self, variable: linopy.Variable, indices: tuple[np.ndarray, var_k0.sum(dim='time') + var_k1.sum(dim='time') <= limit, short_name=f'limit_corrections|{variable.name}', ) - - -# ===== Deprecated aliases for backward compatibility ===== - - -def _create_deprecation_warning(old_name: str, new_name: str): - """Helper to create a deprecation warning""" - _warnings.warn( - f"'{old_name}' is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
Use '{new_name}' instead.", - DeprecationWarning, - stacklevel=3, - ) - - -class Aggregation(Clustering): - """Deprecated: Use Clustering instead.""" - - def __init__(self, *args, **kwargs): - _create_deprecation_warning('Aggregation', 'Clustering') - super().__init__(*args, **kwargs) - - -class AggregationParameters(ClusteringParameters): - """Deprecated: Use ClusteringParameters instead.""" - - def __init__(self, *args, **kwargs): - _create_deprecation_warning('AggregationParameters', 'ClusteringParameters') - super().__init__(*args, **kwargs) - - -class AggregationModel(ClusteringModel): - """Deprecated: Use ClusteringModel instead.""" - - def __init__(self, *args, **kwargs): - _create_deprecation_warning('AggregationModel', 'ClusteringModel') - super().__init__(*args, **kwargs) diff --git a/flixopt/config.py b/flixopt/config.py index dbe2bf3c5..f090430b0 100644 --- a/flixopt/config.py +++ b/flixopt/config.py @@ -20,7 +20,7 @@ COLORLOG_AVAILABLE = False escape_codes = None -__all__ = ['CONFIG', 'change_logging_level', 'MultilineFormatter', 'SUCCESS_LEVEL'] +__all__ = ['CONFIG', 'MultilineFormatter', 'SUCCESS_LEVEL'] if COLORLOG_AVAILABLE: __all__.append('ColoredMultilineFormatter') @@ -30,7 +30,7 @@ logging.addLevelName(SUCCESS_LEVEL, 'SUCCESS') # Deprecation removal version - update this when planning the next major version -DEPRECATION_REMOVAL_VERSION = '5.0.0' +DEPRECATION_REMOVAL_VERSION = '6.0.0' class MultilineFormatter(logging.Formatter): @@ -808,23 +808,3 @@ def _apply_config_dict(cls, config_dict: dict) -> None: elif hasattr(cls, key) and key != 'logging': # Skip 'logging' as it requires special handling via CONFIG.Logging methods setattr(cls, key, value) - - -def change_logging_level(level_name: str | int) -> None: - """Change the logging level for the flixopt logger. - - Args: - level_name: The logging level to set (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant). 
- - Examples: - >>> change_logging_level('DEBUG') # deprecated - >>> # Use this instead: - >>> CONFIG.Logging.enable_console('DEBUG') - """ - warnings.warn( - f'change_logging_level is deprecated and will be removed in version {DEPRECATION_REMOVAL_VERSION} ' - 'Use CONFIG.Logging.enable_console(level) instead.', - DeprecationWarning, - stacklevel=2, - ) - CONFIG.Logging.enable_console(level_name) diff --git a/flixopt/core.py b/flixopt/core.py index f2c2c049a..a14aa6654 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -4,7 +4,6 @@ """ import logging -import warnings from itertools import permutations from typing import Any, Literal @@ -12,7 +11,6 @@ import pandas as pd import xarray as xr -from .config import DEPRECATION_REMOVAL_VERSION from .types import NumericOrBool logger = logging.getLogger('flixopt') @@ -101,40 +99,11 @@ def from_dataarray( da: xr.DataArray, clustering_group: str | None = None, clustering_weight: float | None = None, - aggregation_group: str | None = None, - aggregation_weight: float | None = None, ): """Create TimeSeriesData from DataArray, extracting metadata from attrs.""" - # Handle deprecated parameters - if aggregation_group is not None: - warnings.warn( - f'aggregation_group is deprecated, use clustering_group instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - if clustering_group is None: - clustering_group = aggregation_group - if aggregation_weight is not None: - warnings.warn( - f'aggregation_weight is deprecated, use clustering_weight instead. 
' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - if clustering_weight is None: - clustering_weight = aggregation_weight - - # Get clustering metadata from attrs or parameters (try both old and new attrs keys for backward compat) - final_clustering_group = ( - clustering_group - if clustering_group is not None - else da.attrs.get('clustering_group', da.attrs.get('aggregation_group')) - ) + final_clustering_group = clustering_group if clustering_group is not None else da.attrs.get('clustering_group') final_clustering_weight = ( - clustering_weight - if clustering_weight is not None - else da.attrs.get('clustering_weight', da.attrs.get('aggregation_weight')) + clustering_weight if clustering_weight is not None else da.attrs.get('clustering_weight') ) return cls(da, clustering_group=final_clustering_group, clustering_weight=final_clustering_weight) diff --git a/flixopt/effects.py b/flixopt/effects.py index 124504c67..9df7c2ce5 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -8,7 +8,6 @@ from __future__ import annotations import logging -import warnings from collections import deque from typing import TYPE_CHECKING, Literal @@ -16,7 +15,6 @@ import numpy as np import xarray as xr -from .config import DEPRECATION_REMOVAL_VERSION from .core import PlausibilityError from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io @@ -468,21 +466,16 @@ def create_effect_values_dict(self, effect_values_user: Numeric_TPS | Effect_TPS Note: a standard effect must be defined when passing scalars or None labels. 
""" - def get_effect_label(eff: Effect | str) -> str: - """Temporary function to get the label of an effect and warn for deprecation""" + def get_effect_label(eff: str | None) -> str: + """Get the label of an effect""" + if eff is None: + return self.standard_effect.label if isinstance(eff, Effect): - warnings.warn( - f'The use of effect objects when specifying EffectValues is deprecated. ' - f'Use the label of the effect instead. Used effect: {eff.label_full}. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - UserWarning, - stacklevel=2, + raise TypeError( + f'Effect objects are no longer accepted when specifying EffectValues. ' + f'Use the label string instead. Got: {eff.label_full}' ) - return eff.label - elif eff is None: - return self.standard_effect.label - else: - return eff + return eff if effect_values_user is None: return None diff --git a/flixopt/elements.py b/flixopt/elements.py index f12dae4c4..ae31f34c6 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -5,14 +5,13 @@ from __future__ import annotations import logging -import warnings from typing import TYPE_CHECKING import numpy as np import xarray as xr from . import io as fx_io -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION +from .config import CONFIG from .core import PlausibilityError from .features import InvestmentModel, StatusModel from .interface import InvestParameters, StatusParameters @@ -486,18 +485,11 @@ def __init__( self.component: str = 'UnknownComponent' self.is_input_in_component: bool | None = None if isinstance(bus, Bus): - self.bus = bus.label_full - warnings.warn( - f'Bus {bus.label} is passed as a Bus object to {self.label}. This is deprecated and will be removed ' - f'in the future. Add the Bus to the FlowSystem instead and pass its label to the Flow. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - UserWarning, - stacklevel=1, + raise TypeError( + f'Bus {bus.label} is passed as a Bus object to Flow {self.label}. 
' + f'This is no longer supported. Add the Bus to the FlowSystem and pass its label (string) to the Flow.' ) - self._bus_object = bus - else: - self.bus = bus - self._bus_object = None + self.bus = bus def create_model(self, model: FlowSystemModel) -> FlowModel: self._plausibility_checks() diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index c0deaa1ca..98d4c5c0b 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -15,7 +15,7 @@ import xarray as xr from . import io as fx_io -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION +from .config import CONFIG from .core import ( ConversionError, DataConverter, @@ -142,9 +142,6 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): (components, buses, effects, flows) to find the element with the matching label. - Element labels must be unique across all container types. Attempting to add elements with duplicate labels will raise an error, ensuring each label maps to exactly one element. - - The `.all_elements` property is deprecated. Use the dict-like interface instead: - `flow_system['element']`, `'element' in flow_system`, `flow_system.keys()`, - `flow_system.values()`, or `flow_system.items()`. - Direct container access (`.components`, `.buses`, `.effects`, `.flows`) is useful when you need type-specific filtering or operations. - The `.flows` container is automatically populated from all component inputs and outputs. 
@@ -166,18 +163,7 @@ def __init__( scenario_weights: Numeric_S | None = None, scenario_independent_sizes: bool | list[str] = True, scenario_independent_flow_rates: bool | list[str] = False, - **kwargs, ): - scenario_weights = self._handle_deprecated_kwarg( - kwargs, - 'weights', - 'scenario_weights', - scenario_weights, - check_conflict=True, - additional_warning_message='This might lead to later errors if your custom weights used the period dimension.', - ) - self._validate_kwargs(kwargs) - self.timesteps = self._validate_timesteps(timesteps) # Compute all time-related metadata using shared helper @@ -990,18 +976,6 @@ def _connect_network(self): flow.component = component.label_full flow.is_input_in_component = True if flow in component.inputs else False - # Add Bus if not already added (deprecated) - if flow._bus_object is not None and flow._bus_object.label_full not in self.buses: - warnings.warn( - f'The Bus {flow._bus_object.label_full} was added to the FlowSystem from {flow.label_full}.' - f'This is deprecated and will be removed in the future. ' - f'Please pass the Bus.label to the Flow and the Bus to the FlowSystem instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=1, - ) - self._add_buses(flow._bus_object) - # Connect Buses bus = self.buses.get(flow.bus) if bus is None: @@ -1093,29 +1067,6 @@ def flows(self) -> ElementContainer[Flow]: self._flows_cache = ElementContainer(flows, element_type_name='flows', truncate_repr=10) return self._flows_cache - @property - def all_elements(self) -> dict[str, Element]: - """ - Get all elements as a dictionary. - - .. deprecated:: 3.2.0 - Use dict-like interface instead: `flow_system['element']`, `'element' in flow_system`, - `flow_system.keys()`, `flow_system.values()`, or `flow_system.items()`. - This property will be removed in v4.0.0. - - Returns: - Dictionary mapping element labels to element objects. 
- """ - warnings.warn( - "The 'all_elements' property is deprecated. Use dict-like interface instead: " - "flow_system['element'], 'element' in flow_system, flow_system.keys(), " - 'flow_system.values(), or flow_system.items(). ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return {**self.components, **self.effects, **self.flows, **self.buses} - @property def coords(self) -> dict[FlowSystemDimensions, pd.Index]: active_coords = {'time': self.timesteps} @@ -1163,32 +1114,6 @@ def scenario_weights(self, value: Numeric_S | None) -> None: self._scenario_weights = self.fit_to_model_coords('scenario_weights', value, dims=['scenario']) - @property - def weights(self) -> Numeric_S | None: - warnings.warn( - f'FlowSystem.weights is deprecated. Use FlowSystem.scenario_weights instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self.scenario_weights - - @weights.setter - def weights(self, value: Numeric_S) -> None: - """ - Set weights (deprecated - sets scenario_weights). - - Args: - value: Scenario weights to set - """ - warnings.warn( - f'Setting FlowSystem.weights is deprecated. Set FlowSystem.scenario_weights instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - self.scenario_weights = value # Use the scenario_weights setter - def _validate_scenario_parameter(self, value: bool | list[str], param_name: str, element_type: str) -> None: """ Validate scenario parameter value. 
diff --git a/flixopt/optimization.py b/flixopt/optimization.py index e537029d7..1de1bbc49 100644 --- a/flixopt/optimization.py +++ b/flixopt/optimization.py @@ -15,9 +15,8 @@ import pathlib import sys import timeit -import warnings from collections import Counter -from typing import TYPE_CHECKING, Annotated, Any, Protocol, runtime_checkable +from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable import numpy as np from tqdm import tqdm @@ -26,7 +25,7 @@ from .clustering import Clustering, ClusteringModel, ClusteringParameters from .components import Storage from .config import CONFIG, SUCCESS_LEVEL -from .core import DEPRECATION_REMOVAL_VERSION, DataConverter, TimeSeriesData, drop_constant_arrays +from .core import DataConverter, TimeSeriesData, drop_constant_arrays from .effects import PENALTY_EFFECT_LABEL from .features import InvestmentModel from .flow_system import FlowSystem @@ -85,7 +84,6 @@ def _initialize_optimization_common( obj: Any, name: str, flow_system: FlowSystem, - active_timesteps: pd.DatetimeIndex | None = None, folder: pathlib.Path | None = None, normalize_weights: bool = True, ) -> None: @@ -99,7 +97,6 @@ def _initialize_optimization_common( obj: The optimization object being initialized name: Name of the optimization flow_system: FlowSystem to optimize - active_timesteps: DEPRECATED. Use flow_system.sel(time=...) instead folder: Directory for saving results normalize_weights: Whether to normalize scenario weights """ @@ -112,17 +109,6 @@ def _initialize_optimization_common( ) flow_system = flow_system.copy() - if active_timesteps is not None: - warnings.warn( - f"The 'active_timesteps' parameter is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
" - 'Use flow_system.sel(time=timesteps) or flow_system.isel(time=indices) before passing ' - 'the FlowSystem to the Optimization instead.', - DeprecationWarning, - stacklevel=2, - ) - flow_system = flow_system.sel(time=active_timesteps) - - obj._active_timesteps = active_timesteps # deprecated obj.normalize_weights = normalize_weights flow_system._used_in_optimization = True @@ -155,7 +141,6 @@ class Optimization: flow_system: flow_system which should be optimized folder: folder where results should be saved. If None, then the current working directory is used. normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving. - active_timesteps: Deprecated. Use FlowSystem.sel(time=...) or FlowSystem.isel(time=...) instead. Examples: Basic usage: @@ -182,10 +167,6 @@ def __init__( self, name: str, flow_system: FlowSystem, - active_timesteps: Annotated[ - pd.DatetimeIndex | None, - 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', - ] = None, folder: pathlib.Path | None = None, normalize_weights: bool = True, ): @@ -193,7 +174,6 @@ def __init__( self, name=name, flow_system=flow_system, - active_timesteps=active_timesteps, folder=folder, normalize_weights=normalize_weights, ) @@ -360,16 +340,6 @@ def summary(self): 'Config': CONFIG.to_dict(), } - @property - def active_timesteps(self) -> pd.DatetimeIndex | None: - warnings.warn( - f'active_timesteps is deprecated. Use flow_system.sel(time=...) or flow_system.isel(time=...) instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self._active_timesteps - @property def modeled(self) -> bool: return True if self.model is not None else False @@ -393,7 +363,6 @@ class ClusteredOptimization(Optimization): clustering_parameters: Parameters for clustering. See ClusteringParameters class documentation components_to_clusterize: list of Components to perform aggregation on. 
If None, all components are aggregated. This equalizes variables in the components according to the typical periods computed in the aggregation - active_timesteps: DatetimeIndex of timesteps to use for optimization. If None, all timesteps are used folder: Folder where results should be saved. If None, current working directory is used normalize_weights: Whether to automatically normalize the weights of scenarios to sum up to 1 when solving @@ -408,10 +377,6 @@ def __init__( flow_system: FlowSystem, clustering_parameters: ClusteringParameters, components_to_clusterize: list[Component] | None = None, - active_timesteps: Annotated[ - pd.DatetimeIndex | None, - 'DEPRECATED: Use flow_system.sel(time=...) or flow_system.isel(time=...) instead', - ] = None, folder: pathlib.Path | None = None, normalize_weights: bool = True, ): @@ -422,7 +387,6 @@ def __init__( super().__init__( name=name, flow_system=flow_system, - active_timesteps=active_timesteps, folder=folder, normalize_weights=normalize_weights, ) @@ -503,26 +467,10 @@ def _perform_clustering(self): self.flow_system.connect_and_transform() self.durations['clustering'] = round(timeit.default_timer() - t_start_agg, 2) - def _perform_aggregation(self): - """Deprecated: Use _perform_clustering instead.""" - warnings.warn( - f'_perform_aggregation is deprecated, use _perform_clustering instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self._perform_clustering() - @classmethod def calculate_clustering_weights(cls, ds: xr.Dataset) -> dict[str, float]: """Calculate weights for all datavars in the dataset. 
Weights are pulled from the attrs of the datavars.""" - - # Support both old and new attr names for backward compatibility - groups = [ - da.attrs.get('clustering_group', da.attrs.get('aggregation_group')) - for da in ds.data_vars.values() - if 'clustering_group' in da.attrs or 'aggregation_group' in da.attrs - ] + groups = [da.attrs.get('clustering_group') for da in ds.data_vars.values() if 'clustering_group' in da.attrs] group_counts = Counter(groups) # Calculate weight for each group (1/count) @@ -530,31 +478,18 @@ def calculate_clustering_weights(cls, ds: xr.Dataset) -> dict[str, float]: weights = {} for name, da in ds.data_vars.items(): - # Try both old and new attr names - clustering_group = da.attrs.get('clustering_group', da.attrs.get('aggregation_group')) + clustering_group = da.attrs.get('clustering_group') group_weight = group_weights.get(clustering_group) if group_weight is not None: weights[name] = group_weight else: - # Try both old and new attr names for weight - weights[name] = da.attrs.get('clustering_weight', da.attrs.get('aggregation_weight', 1)) + weights[name] = da.attrs.get('clustering_weight', 1) if np.all(np.isclose(list(weights.values()), 1, atol=1e-6)): logger.info('All Clustering weights were set to 1') return weights - @classmethod - def calculate_aggregation_weights(cls, ds: xr.Dataset) -> dict[str, float]: - """Deprecated: Use calculate_clustering_weights instead.""" - warnings.warn( - f'calculate_aggregation_weights is deprecated, use calculate_clustering_weights instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return cls.calculate_clustering_weights(ds) - class SegmentedOptimization: """Solve large optimization problems by dividing time horizon into (overlapping) segments. 
@@ -673,7 +608,6 @@ class SegmentedOptimization: durations: dict[str, float] model: None # SegmentedOptimization doesn't use a single model normalize_weights: bool - _active_timesteps: pd.DatetimeIndex | None def __init__( self, @@ -688,7 +622,6 @@ def __init__( self, name=name, flow_system=flow_system, - active_timesteps=None, folder=folder, ) self.timesteps_per_segment = timesteps_per_segment @@ -977,13 +910,3 @@ def summary(self): 'Durations': self.durations, 'Config': CONFIG.to_dict(), } - - @property - def active_timesteps(self) -> pd.DatetimeIndex | None: - warnings.warn( - f'active_timesteps is deprecated. Use flow_system.sel(time=...) or flow_system.isel(time=...) instead. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - return self._active_timesteps diff --git a/flixopt/results.py b/flixopt/results.py index 6b9a1c580..f3d0c19a9 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -15,7 +15,7 @@ from . import io as fx_io from . import plotting from .color_processing import process_colors -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL +from .config import CONFIG, SUCCESS_LEVEL from .flow_system import FlowSystem from .structure import CompositeContainerMixin, ResultsContainer @@ -209,7 +209,6 @@ def __init__( summary: dict, folder: pathlib.Path | None = None, model: linopy.Model | None = None, - **kwargs, # To accept old "flow_system" parameter ): """Initialize Results with optimization data. Usually, this class is instantiated by an Optimization object via `Results.from_optimization()` @@ -222,28 +221,7 @@ def __init__( summary: Optimization metadata. folder: Results storage folder. model: Linopy optimization model. - Deprecated: - flow_system: Use flow_system_data instead. - - Note: - The legacy alias `CalculationResults` is deprecated. Use `Results` instead. 
""" - # Handle potential old "flow_system" parameter for backward compatibility - if 'flow_system' in kwargs and flow_system_data is None: - flow_system_data = kwargs.pop('flow_system') - warnings.warn( - "The 'flow_system' parameter is deprecated. Use 'flow_system_data' instead. " - "Access is now via '.flow_system_data', while '.flow_system' returns the restored FlowSystem. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - - # Validate that flow_system_data is provided - if flow_system_data is None: - raise TypeError( - "flow_system_data is required (or use deprecated 'flow_system' for backward compatibility)." - ) self.solution = solution self.flow_system_data = flow_system_data @@ -909,11 +887,6 @@ def plot_heatmap( | Literal['auto'] | None = 'auto', fill: Literal['ffill', 'bfill'] | None = 'ffill', - # Deprecated parameters (kept for backwards compatibility) - indexer: dict[FlowSystemDimensions, Any] | None = None, - heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] | None = None, - heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] | None = None, - color_map: str | None = None, **plot_kwargs: Any, ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]: """ @@ -1030,10 +1003,6 @@ def plot_heatmap( facet_cols=facet_cols, reshape_time=reshape_time, fill=fill, - indexer=indexer, - heatmap_timeframes=heatmap_timeframes, - heatmap_timesteps_per_frame=heatmap_timesteps_per_frame, - color_map=color_map, **plot_kwargs, ) @@ -1122,39 +1091,6 @@ def to_file( logger.log(SUCCESS_LEVEL, f'Saved optimization results "{name}" to {paths.model_documentation.parent}') -class CalculationResults(Results): - """DEPRECATED: Use Results instead. - - Backwards-compatible alias for Results class. - All functionality is inherited from Results. 
- """ - - def __init__(self, *args, **kwargs): - # Only warn if directly instantiating CalculationResults (not subclasses) - if self.__class__.__name__ == 'CalculationResults': - warnings.warn( - f'CalculationResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. Use Results instead.', - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) - - @classmethod - def from_calculation(cls, calculation: Optimization) -> CalculationResults: - """Create CalculationResults from a Calculation object. - - DEPRECATED: Use Results.from_optimization() instead. - Backwards-compatible method that redirects to from_optimization(). - - Args: - calculation: Calculation object with solved model. - - Returns: - CalculationResults: New instance with extracted results. - """ - return cls.from_optimization(calculation) - - class _ElementResults: def __init__(self, results: Results, label: str, variables: list[str], constraints: list[str]): self._results = results @@ -1265,8 +1201,6 @@ def plot_node_balance( facet_by: str | list[str] | None = 'scenario', animate_by: str | None = 'period', facet_cols: int | None = None, - # Deprecated parameter (kept for backwards compatibility) - indexer: dict[FlowSystemDimensions, Any] | None = None, **plot_kwargs: Any, ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]: """ @@ -1367,22 +1301,6 @@ def plot_node_balance( >>> fig.update_layout(template='plotly_dark', width=1200, height=600) >>> fig.show() """ - # Handle deprecated indexer parameter - if indexer is not None: - # Check for conflict with new parameter - if select is not None: - raise ValueError( - "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." - ) - - warnings.warn( - f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - select = indexer - if engine not in {'plotly', 'matplotlib'}: raise ValueError(f'Engine "{engine}" not supported. Use one of ["plotly", "matplotlib"]') @@ -1450,8 +1368,6 @@ def plot_node_balance_pie( show: bool | None = None, engine: plotting.PlottingEngine = 'plotly', select: dict[FlowSystemDimensions, Any] | None = None, - # Deprecated parameter (kept for backwards compatibility) - indexer: dict[FlowSystemDimensions, Any] | None = None, **plot_kwargs: Any, ) -> plotly.graph_objs.Figure | tuple[plt.Figure, list[plt.Axes]]: """Plot pie chart of flow hours distribution. @@ -1501,22 +1417,6 @@ def plot_node_balance_pie( >>> results['Bus'].plot_node_balance_pie(save='figure.png', dpi=600) """ - # Handle deprecated indexer parameter - if indexer is not None: - # Check for conflict with new parameter - if select is not None: - raise ValueError( - "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." - ) - - warnings.warn( - f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - select = indexer - # Extract dpi for export_figure dpi = plot_kwargs.pop('dpi', None) # None uses CONFIG.Plotting.default_dpi @@ -1624,8 +1524,6 @@ def node_balance( unit_type: Literal['flow_rate', 'flow_hours'] = 'flow_rate', drop_suffix: bool = False, select: dict[FlowSystemDimensions, Any] | None = None, - # Deprecated parameter (kept for backwards compatibility) - indexer: dict[FlowSystemDimensions, Any] | None = None, ) -> xr.Dataset: """ Returns a dataset with the node balance of the Component or Bus. @@ -1640,22 +1538,6 @@ def node_balance( drop_suffix: Whether to drop the suffix from the variable names. select: Optional data selection dict. Supports single values, lists, slices, and index arrays. 
""" - # Handle deprecated indexer parameter - if indexer is not None: - # Check for conflict with new parameter - if select is not None: - raise ValueError( - "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." - ) - - warnings.warn( - f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - select = indexer - ds = self.solution[self.inputs + self.outputs] ds = sanitize_dataset( @@ -1716,8 +1598,6 @@ def plot_charge_state( facet_by: str | list[str] | None = 'scenario', animate_by: str | None = 'period', facet_cols: int | None = None, - # Deprecated parameter (kept for backwards compatibility) - indexer: dict[FlowSystemDimensions, Any] | None = None, **plot_kwargs: Any, ) -> plotly.graph_objs.Figure: """Plot storage charge state over time, combined with the node balance with optional faceting and animation. @@ -1786,22 +1666,6 @@ def plot_charge_state( >>> results['Storage'].plot_charge_state(save='storage.png', dpi=600) """ - # Handle deprecated indexer parameter - if indexer is not None: - # Check for conflict with new parameter - if select is not None: - raise ValueError( - "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." - ) - - warnings.warn( - f"The 'indexer' parameter is deprecated and will be removed in a future version. Use 'select' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - select = indexer - # Extract dpi for export_figure dpi = plot_kwargs.pop('dpi', None) # None uses CONFIG.Plotting.default_dpi @@ -2280,10 +2144,6 @@ def plot_heatmap( animate_by: str | None = None, facet_cols: int | None = None, fill: Literal['ffill', 'bfill'] | None = 'ffill', - # Deprecated parameters (kept for backwards compatibility) - heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] | None = None, - heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] | None = None, - color_map: str | None = None, **plot_kwargs: Any, ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]: """Plot heatmap of variable solution across segments. @@ -2302,9 +2162,6 @@ def plot_heatmap( animate_by: Dimension to animate over (Plotly only). facet_cols: Number of columns in the facet grid layout. fill: Method to fill missing values: 'ffill' or 'bfill'. - heatmap_timeframes: (Deprecated) Use reshape_time instead. - heatmap_timesteps_per_frame: (Deprecated) Use reshape_time instead. - color_map: (Deprecated) Use colors instead. **plot_kwargs: Additional plotting customization options. Common options: @@ -2320,41 +2177,6 @@ def plot_heatmap( Returns: Figure object. """ - # Handle deprecated parameters - if heatmap_timeframes is not None or heatmap_timesteps_per_frame is not None: - # Check for conflict with new parameter - if reshape_time != 'auto': # Check if user explicitly set reshape_time - raise ValueError( - "Cannot use both deprecated parameters 'heatmap_timeframes'/'heatmap_timesteps_per_frame' " - "and new parameter 'reshape_time'. Use only 'reshape_time'." - ) - - warnings.warn( - "The 'heatmap_timeframes' and 'heatmap_timesteps_per_frame' parameters are deprecated. " - f"Use 'reshape_time=(timeframes, timesteps_per_frame)' instead. 
" - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - # Override reshape_time if old parameters provided - if heatmap_timeframes is not None and heatmap_timesteps_per_frame is not None: - reshape_time = (heatmap_timeframes, heatmap_timesteps_per_frame) - - if color_map is not None: - # Check for conflict with new parameter - if colors is not None: # Check if user explicitly set colors - raise ValueError( - "Cannot use both deprecated parameter 'color_map' and new parameter 'colors'. Use only 'colors'." - ) - - warnings.warn( - f"The 'color_map' parameter is deprecated. Use 'colors' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - colors = color_map - return plot_heatmap( data=self.solution_without_overlap(variable_name), name=variable_name, @@ -2412,40 +2234,6 @@ def to_file( logger.info(f'Saved optimization "{name}" to {path}') -class SegmentedCalculationResults(SegmentedResults): - """DEPRECATED: Use SegmentedResults instead. - - Backwards-compatible alias for SegmentedResults class. - All functionality is inherited from SegmentedResults. - """ - - def __init__(self, *args, **kwargs): - # Only warn if directly instantiating SegmentedCalculationResults (not subclasses) - if self.__class__.__name__ == 'SegmentedCalculationResults': - warnings.warn( - f'SegmentedCalculationResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' - 'Use SegmentedResults instead.', - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) - - @classmethod - def from_calculation(cls, calculation: SegmentedOptimization) -> SegmentedCalculationResults: - """Create SegmentedCalculationResults from a SegmentedCalculation object. - - DEPRECATED: Use SegmentedResults.from_optimization() instead. - Backwards-compatible method that redirects to from_optimization(). - - Args: - calculation: SegmentedCalculation object with solved model. 
- - Returns: - SegmentedCalculationResults: New instance with extracted results. - """ - return cls.from_optimization(calculation) - - def plot_heatmap( data: xr.DataArray | xr.Dataset, name: str | None = None, @@ -2462,11 +2250,6 @@ def plot_heatmap( | Literal['auto'] | None = 'auto', fill: Literal['ffill', 'bfill'] | None = 'ffill', - # Deprecated parameters (kept for backwards compatibility) - indexer: dict[str, Any] | None = None, - heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] | None = None, - heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] | None = None, - color_map: str | None = None, **plot_kwargs: Any, ): """Plot heatmap visualization with support for multi-variable, faceting, and animation. @@ -2515,57 +2298,6 @@ def plot_heatmap( >>> plot_heatmap(dataset, animate_by='variable', reshape_time=('D', 'h')) """ - # Handle deprecated heatmap time parameters - if heatmap_timeframes is not None or heatmap_timesteps_per_frame is not None: - # Check for conflict with new parameter - if reshape_time != 'auto': # User explicitly set reshape_time - raise ValueError( - "Cannot use both deprecated parameters 'heatmap_timeframes'/'heatmap_timesteps_per_frame' " - "and new parameter 'reshape_time'. Use only 'reshape_time'." - ) - - warnings.warn( - "The 'heatmap_timeframes' and 'heatmap_timesteps_per_frame' parameters are deprecated. " - "Use 'reshape_time=(timeframes, timesteps_per_frame)' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - # Override reshape_time if both old parameters provided - if heatmap_timeframes is not None and heatmap_timesteps_per_frame is not None: - reshape_time = (heatmap_timeframes, heatmap_timesteps_per_frame) - - # Handle deprecated color_map parameter - if color_map is not None: - if colors is not None: # User explicitly set colors - raise ValueError( - "Cannot use both deprecated parameter 'color_map' and new parameter 'colors'. 
Use only 'colors'." - ) - - warnings.warn( - f"The 'color_map' parameter is deprecated. Use 'colors' instead." - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - colors = color_map - - # Handle deprecated indexer parameter - if indexer is not None: - # Check for conflict with new parameter - if select is not None: # User explicitly set select - raise ValueError( - "Cannot use both deprecated parameter 'indexer' and new parameter 'select'. Use only 'select'." - ) - - warnings.warn( - f"The 'indexer' parameter is deprecated. Use 'select' instead. " - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', - DeprecationWarning, - stacklevel=2, - ) - select = indexer - # Convert Dataset to DataArray with 'variable' dimension if isinstance(data, xr.Dataset): # Extract all data variables from the Dataset diff --git a/tests/test_config.py b/tests/test_config.py index 9c4f423ee..94d626af2 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -211,13 +211,6 @@ def test_attribute_modification(self): assert CONFIG.Modeling.big == 12345678 assert CONFIG.Solving.mip_gap == 0.001 - def test_change_logging_level_deprecated(self): - """Test deprecated change_logging_level function.""" - from flixopt import change_logging_level - - with pytest.warns(DeprecationWarning, match='change_logging_level is deprecated'): - change_logging_level('INFO') - def test_exception_logging(self, capfd): """Test that exceptions are properly logged with tracebacks.""" CONFIG.Logging.enable_console('INFO') diff --git a/tests/test_integration.py b/tests/test_integration.py index 6ac1e0467..35b2fa641 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -267,13 +267,13 @@ def modeling_calculation(self, request, flow_system_long, highs_solver): calc.do_modeling() calc.solve(highs_solver) elif modeling_type == 'segmented': - calc = fx.SegmentedCalculation('segModel', flow_system, timesteps_per_segment=96, overlap_timesteps=1) + 
calc = fx.SegmentedOptimization('segModel', flow_system, timesteps_per_segment=96, overlap_timesteps=1) calc.do_modeling_and_solve(highs_solver) elif modeling_type == 'aggregated': - calc = fx.AggregatedCalculation( + calc = fx.ClusteredOptimization( 'aggModel', flow_system, - fx.AggregationParameters( + fx.ClusteringParameters( hours_per_period=6, nr_of_periods=4, fix_storage_flows=False, diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index c952777b2..a5eb3d6a2 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -345,7 +345,7 @@ def test_scenarios_selection(flow_system_piecewise_conversion_scenarios): assert flow_system.scenarios.equals(flow_system_full.scenarios[0:2]) - np.testing.assert_allclose(flow_system.weights.values, flow_system_full.weights[0:2]) + np.testing.assert_allclose(flow_system.scenario_weights.values, flow_system_full.scenario_weights[0:2]) calc = fx.Optimization(flow_system=flow_system, name='test_scenarios_selection', normalize_weights=False) calc.do_modeling() @@ -357,8 +357,8 @@ def test_scenarios_selection(flow_system_piecewise_conversion_scenarios): np.testing.assert_allclose( calc.results.objective, ( - (calc.results.solution['costs'] * flow_system.weights).sum() - + (calc.results.solution['Penalty'] * flow_system.weights).sum() + (calc.results.solution['costs'] * flow_system.scenario_weights).sum() + + (calc.results.solution['Penalty'] * flow_system.scenario_weights).sum() ).item(), ) ## Account for rounding errors @@ -752,8 +752,8 @@ def test_weights_io_persistence(): fs_loaded = fx.FlowSystem.from_dataset(ds) # Verify weights persisted correctly - np.testing.assert_allclose(fs_loaded.weights.values, fs_original.weights.values) - assert fs_loaded.weights.dims == fs_original.weights.dims + np.testing.assert_allclose(fs_loaded.scenario_weights.values, fs_original.scenario_weights.values) + assert fs_loaded.scenario_weights.dims == fs_original.scenario_weights.dims def test_weights_selection(): @@ -788,7 
+788,7 @@ def test_weights_selection(): # Verify weights are correctly sliced assert fs_subset.scenarios.equals(pd.Index(['base', 'high'], name='scenario')) - np.testing.assert_allclose(fs_subset.weights.values, custom_scenario_weights[[0, 2]]) + np.testing.assert_allclose(fs_subset.scenario_weights.values, custom_scenario_weights[[0, 2]]) # Verify weights are 1D with just scenario dimension (no period dimension) - assert fs_subset.weights.dims == ('scenario',) + assert fs_subset.scenario_weights.dims == ('scenario',) From 5fd9ff531cd8eb258918fc880fef9c9fcb410e96 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 05:51:56 +0100 Subject: [PATCH 09/49] Feature/excess rename (#501) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * | File | Changes | |-------------------------|----------------------------------------------------------------------------------| | flixopt/elements.py | Renamed attributes excess_input → virtual_supply, excess_output → virtual_demand | | flixopt/optimization.py | Updated attribute access and result keys | | tests/test_bus.py | Updated variable name strings in assertions | | docs/.../Bus.md | Updated description of φ symbols | The variable names in the optimization model are now: - {BusName}|virtual_supply (was excess_input) - {BusName}|virtual_demand (was excess_output) * Renamed excess_penalty_per_flow_hour → imbalance_penalty_per_flow_hour * rename excess_penalty to imbalance_penalty * Change default to None * Added self._validate_kwargs(kwargs) to catch typos and unexpected arguments * Renamed with_excess → allows_imbalance * Fix docstring * 1. docs/user-guide/mathematical-notation/elements/Bus.md - Fixed three typos: - "a imbalance_penalty_per_flow_hour" → "an imbalance_penalty_per_flow_hour" - "usefull" → "useful" - "ifeasiblity" → "infeasibility" 2. 
tests/test_bus.py - Updated comments to use the new imbalance terminology instead of the old "excess" terminology 3. flixopt/elements.py (BusModel) - Improved code clarity: - Changed eq_bus_balance.lhs -= -self.virtual_supply + self.virtual_demand to the more readable eq_bus_balance.lhs += self.virtual_supply - self.virtual_demand - Added a comment explaining the equation: # Σ(inflows) + virtual_supply = Σ(outflows) + virtual_demand - Combined the two separate add_share_to_effects calls into a single call with the combined expression (self.virtual_supply + self.virtual_demand) * imbalance_penalty All 12 bus tests pass with these changes. --- CHANGELOG.md | 8 ++ .../effects-penalty-objective.md | 2 +- .../mathematical-notation/elements/Bus.md | 8 +- examples/02_Complex/complex_example.py | 8 +- .../example_optimization_modes.py | 10 +-- flixopt/elements.py | 76 ++++++++++--------- flixopt/flow_system.py | 2 +- flixopt/optimization.py | 10 +-- tests/test_bus.py | 25 +++--- tests/test_functional.py | 4 +- 10 files changed, 82 insertions(+), 71 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d87e290f..7e181ee9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -121,10 +121,18 @@ Use find-and-replace to update your code with the mappings above. The functional A partial backwards compatibility wrapper would be misleading, so we opted for a clean breaking change. 
+- `Bus.imbalance_penalty_per_flow_hour` now defaults to `None` (strict balance) instead of `1e5` + ### ♻️ Changed +- Renamed `BusModel.excess_input` → `virtual_supply` and `BusModel.excess_output` → `virtual_demand` for clearer semantics +- Renamed `Bus.excess_penalty_per_flow_hour` → `imbalance_penalty_per_flow_hour` +- Renamed `Bus.with_excess` → `allows_imbalance` + ### 🗑️ Deprecated +- `Bus.excess_penalty_per_flow_hour` → use `imbalance_penalty_per_flow_hour` + ### 🔥 Removed **Modules removed:** diff --git a/docs/user-guide/mathematical-notation/effects-penalty-objective.md b/docs/user-guide/mathematical-notation/effects-penalty-objective.md index aeab09031..fd8a97a1d 100644 --- a/docs/user-guide/mathematical-notation/effects-penalty-objective.md +++ b/docs/user-guide/mathematical-notation/effects-penalty-objective.md @@ -197,7 +197,7 @@ Where: - $s_{l \rightarrow \Phi, \text{per}}$ is the periodic penalty share from element $l$ - $s_{l \rightarrow \Phi, \text{temp}}(\text{t}_i)$ is the temporal penalty share from element $l$ at timestep $\text{t}_i$ -**Primary usage:** Penalties occur in [Buses](elements/Bus.md) via the `excess_penalty_per_flow_hour` parameter, which allows nodal imbalances at a high cost, and in time series aggregation to allow period flexibility. +**Primary usage:** Penalties occur in [Buses](elements/Bus.md) via the `imbalance_penalty_per_flow_hour` parameter, which allows nodal imbalances at a high cost, and in time series aggregation to allow period flexibility. 
**Key properties:** - Penalty shares are added via `add_share_to_effects(name, expressions={fx.PENALTY_EFFECT_LABEL: ...}, target='temporal'/'periodic')` diff --git a/docs/user-guide/mathematical-notation/elements/Bus.md b/docs/user-guide/mathematical-notation/elements/Bus.md index bfe57d234..5028e8ef7 100644 --- a/docs/user-guide/mathematical-notation/elements/Bus.md +++ b/docs/user-guide/mathematical-notation/elements/Bus.md @@ -5,8 +5,8 @@ $$ \label{eq:bus_balance} \sum_{f_\text{out} \in \mathcal{F}_\text{out}} p_{f_\text{out}}(\text{t}_i) $$ -Optionally, a Bus can have a `excess_penalty_per_flow_hour` parameter, which allows to penaltize the balance for missing or excess flow-rates. -This is usefull as it handles a possible ifeasiblity gently. +Optionally, a Bus can have an `imbalance_penalty_per_flow_hour` parameter, which allows to penalize the balance for missing or excess flow-rates. +This is useful as it handles a possible infeasibility gently. This changes the balance to @@ -27,10 +27,10 @@ With: - $\mathcal{F}_\text{in}$ and $\mathcal{F}_\text{out}$ being the set of all incoming and outgoing flows - $p_{f_\text{in}}(\text{t}_i)$ and $p_{f_\text{out}}(\text{t}_i)$ being the flow-rate at time $\text{t}_i$ for flow $f_\text{in}$ and $f_\text{out}$, respectively -- $\phi_\text{in}(\text{t}_i)$ and $\phi_\text{out}(\text{t}_i)$ being the missing or excess flow-rate at time $\text{t}_i$, respectively +- $\phi_\text{in}(\text{t}_i)$ and $\phi_\text{out}(\text{t}_i)$ being the virtual supply and virtual demand at time $\text{t}_i$, respectively - $\text{t}_i$ being the time step - $s_{b \rightarrow \Phi}(\text{t}_i)$ being the penalty term -- $\text a_{b \rightarrow \Phi}(\text{t}_i)$ being the penalty coefficient (`excess_penalty_per_flow_hour`) +- $\text a_{b \rightarrow \Phi}(\text{t}_i)$ being the penalty coefficient (`imbalance_penalty_per_flow_hour`) --- diff --git a/examples/02_Complex/complex_example.py b/examples/02_Complex/complex_example.py index 
b86c0e9de..3806fde40 100644 --- a/examples/02_Complex/complex_example.py +++ b/examples/02_Complex/complex_example.py @@ -13,7 +13,7 @@ # --- Experiment Options --- # Configure options for testing various parameters and behaviors check_penalty = False - excess_penalty = 1e5 + imbalance_penalty = 1e5 use_chp_with_piecewise_conversion = True time_indices = None # Define specific time steps for custom optimizations, or use the entire series @@ -34,9 +34,9 @@ # --- Define Energy Buses --- # Represent node balances (inputs=outputs) for the different energy carriers (electricity, heat, gas) in the system flow_system.add_elements( - fx.Bus('Strom', excess_penalty_per_flow_hour=excess_penalty), - fx.Bus('Fernwärme', excess_penalty_per_flow_hour=excess_penalty), - fx.Bus('Gas', excess_penalty_per_flow_hour=excess_penalty), + fx.Bus('Strom', imbalance_penalty_per_flow_hour=imbalance_penalty), + fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=imbalance_penalty), + fx.Bus('Gas', imbalance_penalty_per_flow_hour=imbalance_penalty), ) # --- Define Effects --- diff --git a/examples/03_Optimization_modes/example_optimization_modes.py b/examples/03_Optimization_modes/example_optimization_modes.py index 009c008d9..8f26d84b4 100644 --- a/examples/03_Optimization_modes/example_optimization_modes.py +++ b/examples/03_Optimization_modes/example_optimization_modes.py @@ -41,7 +41,7 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset: penalty_of_period_freedom=0, ) keep_extreme_periods = True - excess_penalty = 1e5 # or set to None if not needed + imbalance_penalty = 1e5 # or set to None if not needed # Data Import data_import = pd.read_csv( @@ -67,10 +67,10 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset: flow_system = fx.FlowSystem(timesteps) flow_system.add_elements( - fx.Bus('Strom', excess_penalty_per_flow_hour=excess_penalty), - fx.Bus('Fernwärme', excess_penalty_per_flow_hour=excess_penalty), - fx.Bus('Gas', 
excess_penalty_per_flow_hour=excess_penalty), - fx.Bus('Kohle', excess_penalty_per_flow_hour=excess_penalty), + fx.Bus('Strom', imbalance_penalty_per_flow_hour=imbalance_penalty), + fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=imbalance_penalty), + fx.Bus('Gas', imbalance_penalty_per_flow_hour=imbalance_penalty), + fx.Bus('Kohle', imbalance_penalty_per_flow_hour=imbalance_penalty), ) # Effects diff --git a/flixopt/elements.py b/flixopt/elements.py index ae31f34c6..9ca938b62 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -195,9 +195,9 @@ class Bus(Element): Args: label: The label of the Element. Used to identify it in the FlowSystem. - excess_penalty_per_flow_hour: Penalty costs for bus balance violations. - When None, no excess/deficit is allowed (hard constraint). When set to a - value > 0, allows bus imbalances at penalty cost. Default is 1e5 (high penalty). + imbalance_penalty_per_flow_hour: Penalty costs for bus balance violations. + When None (default), no imbalance is allowed (hard constraint). When set to a + value > 0, allows bus imbalances at penalty cost. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -207,7 +207,7 @@ class Bus(Element): ```python electricity_bus = Bus( label='main_electrical_bus', - excess_penalty_per_flow_hour=None, # No imbalance allowed + imbalance_penalty_per_flow_hour=None, # No imbalance allowed ) ``` @@ -216,7 +216,7 @@ class Bus(Element): ```python heat_network = Bus( label='district_heating_network', - excess_penalty_per_flow_hour=1000, # €1000/MWh penalty for imbalance + imbalance_penalty_per_flow_hour=1000, # €1000/MWh penalty for imbalance ) ``` @@ -225,14 +225,14 @@ class Bus(Element): ```python material_hub = Bus( label='material_processing_hub', - excess_penalty_per_flow_hour=waste_disposal_costs, # Time series + imbalance_penalty_per_flow_hour=waste_disposal_costs, # Time series ) ``` Note: - The bus balance equation enforced is: Σ(inflows) = Σ(outflows) + excess - deficit + The bus balance equation enforced is: Σ(inflows) + virtual_supply = Σ(outflows) + virtual_demand - When excess_penalty_per_flow_hour is None, excess and deficit are forced to zero. + When imbalance_penalty_per_flow_hour is None, virtual_supply and virtual_demand are forced to zero. When a penalty cost is specified, the optimization can choose to violate the balance if economically beneficial, paying the penalty. The penalty is added to the objective directly. 
@@ -246,11 +246,16 @@ class Bus(Element): def __init__( self, label: str, - excess_penalty_per_flow_hour: Numeric_TPS | None = 1e5, + imbalance_penalty_per_flow_hour: Numeric_TPS | None = None, meta_data: dict | None = None, + **kwargs, ): super().__init__(label, meta_data=meta_data) - self.excess_penalty_per_flow_hour = excess_penalty_per_flow_hour + imbalance_penalty_per_flow_hour = self._handle_deprecated_kwarg( + kwargs, 'excess_penalty_per_flow_hour', 'imbalance_penalty_per_flow_hour', imbalance_penalty_per_flow_hour + ) + self._validate_kwargs(kwargs) + self.imbalance_penalty_per_flow_hour = imbalance_penalty_per_flow_hour self.inputs: list[Flow] = [] self.outputs: list[Flow] = [] @@ -267,16 +272,16 @@ def _set_flow_system(self, flow_system) -> None: def transform_data(self, name_prefix: str = '') -> None: prefix = '|'.join(filter(None, [name_prefix, self.label_full])) - self.excess_penalty_per_flow_hour = self._fit_coords( - f'{prefix}|excess_penalty_per_flow_hour', self.excess_penalty_per_flow_hour + self.imbalance_penalty_per_flow_hour = self._fit_coords( + f'{prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour ) def _plausibility_checks(self) -> None: - if self.excess_penalty_per_flow_hour is not None: - zero_penalty = np.all(np.equal(self.excess_penalty_per_flow_hour, 0)) + if self.imbalance_penalty_per_flow_hour is not None: + zero_penalty = np.all(np.equal(self.imbalance_penalty_per_flow_hour, 0)) if zero_penalty: logger.warning( - f'In Bus {self.label_full}, the excess_penalty_per_flow_hour is 0. Use "None" or a value > 0.' + f'In Bus {self.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' 
) if len(self.inputs) == 0 and len(self.outputs) == 0: raise ValueError( @@ -284,8 +289,8 @@ def _plausibility_checks(self) -> None: ) @property - def with_excess(self) -> bool: - return False if self.excess_penalty_per_flow_hour is None else True + def allows_imbalance(self) -> bool: + return self.imbalance_penalty_per_flow_hour is not None def __repr__(self) -> str: """Return string representation.""" @@ -856,8 +861,8 @@ class BusModel(ElementModel): element: Bus # Type hint def __init__(self, model: FlowSystemModel, element: Bus): - self.excess_input: linopy.Variable | None = None - self.excess_output: linopy.Variable | None = None + self.virtual_supply: linopy.Variable | None = None + self.virtual_demand: linopy.Variable | None = None super().__init__(model, element) def _do_modeling(self): @@ -870,39 +875,38 @@ def _do_modeling(self): outputs = sum([flow.submodel.flow_rate for flow in self.element.outputs]) eq_bus_balance = self.add_constraints(inputs == outputs, short_name='balance') - # Add excess to balance and penalty if needed - if self.element.with_excess: - excess_penalty = np.multiply(self._model.hours_per_step, self.element.excess_penalty_per_flow_hour) + # Add virtual supply/demand to balance and penalty if needed + if self.element.allows_imbalance: + imbalance_penalty = np.multiply(self._model.hours_per_step, self.element.imbalance_penalty_per_flow_hour) - self.excess_input = self.add_variables(lower=0, coords=self._model.get_coords(), short_name='excess_input') + self.virtual_supply = self.add_variables( + lower=0, coords=self._model.get_coords(), short_name='virtual_supply' + ) - self.excess_output = self.add_variables( - lower=0, coords=self._model.get_coords(), short_name='excess_output' + self.virtual_demand = self.add_variables( + lower=0, coords=self._model.get_coords(), short_name='virtual_demand' ) - eq_bus_balance.lhs -= -self.excess_input + self.excess_output + # Σ(inflows) + virtual_supply = Σ(outflows) + virtual_demand + 
eq_bus_balance.lhs += self.virtual_supply - self.virtual_demand # Add penalty shares as temporal effects (time-dependent) from .effects import PENALTY_EFFECT_LABEL + total_imbalance_penalty = (self.virtual_supply + self.virtual_demand) * imbalance_penalty self._model.effects.add_share_to_effects( name=self.label_of_element, - expressions={PENALTY_EFFECT_LABEL: self.excess_input * excess_penalty}, - target='temporal', - ) - self._model.effects.add_share_to_effects( - name=self.label_of_element, - expressions={PENALTY_EFFECT_LABEL: self.excess_output * excess_penalty}, + expressions={PENALTY_EFFECT_LABEL: total_imbalance_penalty}, target='temporal', ) def results_structure(self): inputs = [flow.submodel.flow_rate.name for flow in self.element.inputs] outputs = [flow.submodel.flow_rate.name for flow in self.element.outputs] - if self.excess_input is not None: - inputs.append(self.excess_input.name) - if self.excess_output is not None: - outputs.append(self.excess_output.name) + if self.virtual_supply is not None: + inputs.append(self.virtual_supply.name) + if self.virtual_demand is not None: + outputs.append(self.virtual_demand.name) return { **super().results_structure(), 'inputs': inputs, diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 98d4c5c0b..9015de3e4 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -77,7 +77,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): >>> >>> # Add elements to the system >>> boiler = fx.Component('Boiler', inputs=[heat_flow], status_parameters=...) 
- >>> heat_bus = fx.Bus('Heat', excess_penalty_per_flow_hour=1e4) + >>> heat_bus = fx.Bus('Heat', imbalance_penalty_per_flow_hour=1e4) >>> costs = fx.Effect('costs', is_objective=True, is_standard=True) >>> flow_system.add_elements(boiler, heat_bus, costs) diff --git a/flixopt/optimization.py b/flixopt/optimization.py index 1de1bbc49..529975df7 100644 --- a/flixopt/optimization.py +++ b/flixopt/optimization.py @@ -309,15 +309,15 @@ def main_results(self) -> dict[str, int | float | dict]: 'Buses with excess': [ { bus.label_full: { - 'input': bus.submodel.excess_input.solution.sum('time'), - 'output': bus.submodel.excess_output.solution.sum('time'), + 'virtual_supply': bus.submodel.virtual_supply.solution.sum('time'), + 'virtual_demand': bus.submodel.virtual_demand.solution.sum('time'), } } for bus in self.flow_system.buses.values() - if bus.with_excess + if bus.allows_imbalance and ( - bus.submodel.excess_input.solution.sum().item() > 1e-3 - or bus.submodel.excess_output.solution.sum().item() > 1e-3 + bus.submodel.virtual_supply.solution.sum().item() > 1e-3 + or bus.submodel.virtual_demand.solution.sum().item() > 1e-3 ) ], } diff --git a/tests/test_bus.py b/tests/test_bus.py index f1497a0ec..cc49a2073 100644 --- a/tests/test_bus.py +++ b/tests/test_bus.py @@ -9,7 +9,7 @@ class TestBusModel: def test_bus(self, basic_flow_system_linopy_coords, coords_config): """Test that flow model constraints are correctly generated.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config - bus = fx.Bus('TestBus', excess_penalty_per_flow_hour=None) + bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None) flow_system.add_elements( bus, fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]), @@ -28,7 +28,7 @@ def test_bus(self, basic_flow_system_linopy_coords, coords_config): def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config): """Test that flow model constraints are correctly generated.""" flow_system, coords_config = 
basic_flow_system_linopy_coords, coords_config - bus = fx.Bus('TestBus') + bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=1e5) flow_system.add_elements( bus, fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]), @@ -37,26 +37,26 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config): model = create_linopy_model(flow_system) assert set(bus.submodel.variables) == { - 'TestBus|excess_input', - 'TestBus|excess_output', + 'TestBus|virtual_supply', + 'TestBus|virtual_demand', 'WärmelastTest(Q_th_Last)|flow_rate', 'GastarifTest(Q_Gas)|flow_rate', } assert set(bus.submodel.constraints) == {'TestBus|balance'} assert_var_equal( - model.variables['TestBus|excess_input'], model.add_variables(lower=0, coords=model.get_coords()) + model.variables['TestBus|virtual_supply'], model.add_variables(lower=0, coords=model.get_coords()) ) assert_var_equal( - model.variables['TestBus|excess_output'], model.add_variables(lower=0, coords=model.get_coords()) + model.variables['TestBus|virtual_demand'], model.add_variables(lower=0, coords=model.get_coords()) ) assert_conequal( model.constraints['TestBus|balance'], model.variables['GastarifTest(Q_Gas)|flow_rate'] - model.variables['WärmelastTest(Q_th_Last)|flow_rate'] - + model.variables['TestBus|excess_input'] - - model.variables['TestBus|excess_output'] + + model.variables['TestBus|virtual_supply'] + - model.variables['TestBus|virtual_demand'] == 0, ) @@ -65,8 +65,7 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config): assert 'TestBus->Penalty(temporal)' in model.constraints assert 'TestBus->Penalty(temporal)' in model.variables - # The penalty share should equal the excess times the penalty cost - # Note: Each excess (input and output) creates its own share constraint, so we have two + # The penalty share should equal the imbalance (virtual_supply + virtual_demand) times the penalty cost # Let's verify the total penalty contribution by checking the effect's temporal 
model penalty_effect = flow_system.effects.penalty_effect assert penalty_effect.submodel is not None @@ -75,14 +74,14 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config): assert_conequal( model.constraints['TestBus->Penalty(temporal)'], model.variables['TestBus->Penalty(temporal)'] - == model.variables['TestBus|excess_input'] * 1e5 * model.hours_per_step - + model.variables['TestBus|excess_output'] * 1e5 * model.hours_per_step, + == model.variables['TestBus|virtual_supply'] * 1e5 * model.hours_per_step + + model.variables['TestBus|virtual_demand'] * 1e5 * model.hours_per_step, ) def test_bus_with_coords(self, basic_flow_system_linopy_coords, coords_config): """Test bus behavior across different coordinate configurations.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config - bus = fx.Bus('TestBus', excess_penalty_per_flow_hour=None) + bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None) flow_system.add_elements( bus, fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]), diff --git a/tests/test_functional.py b/tests/test_functional.py index 4b5c6c686..f351deef5 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -66,8 +66,8 @@ def flow_system_base(timesteps: pd.DatetimeIndex) -> fx.FlowSystem: flow_system = fx.FlowSystem(timesteps) flow_system.add_elements( - fx.Bus('Fernwärme', excess_penalty_per_flow_hour=None), - fx.Bus('Gas', excess_penalty_per_flow_hour=None), + fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=None), + fx.Bus('Gas', imbalance_penalty_per_flow_hour=None), ) flow_system.add_elements(fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True)) flow_system.add_elements( From dbc0a78e5033e851733d72e76267897a7b6d3343 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 30 Nov 2025 06:07:13 +0100 Subject: [PATCH 10/49] Feature/docs improvement (#481) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * Flow.md - Fully restructured with: - Tab-based organization (Core / Advanced / Patterns / Examples) - Collapsible definition blocks - Links to both Flow and FlowModel classes - Updated docstrings with absolute URLs 2. Bus.md - Restructured with tab organization and dual class linking 3. Storage.md - Restructured with comprehensive examples and dual class linking 4. LinearConverter.md - Restructured with detailed examples including specialized converters 5. InvestParameters.md - Restructured with clear separation of core vs. advanced features * Improve organization * Improve organization * Improve organization by using tables * Improve organization by using tables and use eqref * Add symbol to parameter mapping * Changed to inline math * Use propre constraints with numbering * Move parameters into separate tab * Reorder parameters * : Use the columns "symbol" and "python name" in the variables tab * Update Bus, Storage, and LinearConverter.md * Update InvestParameters and OnOffParameters.md * Update Piecewise.md * Compact effects-penalty-objective.md * Allow toc level 3 * Add toc to homepage * Replace ustom css with mkdocs material stuff * Revert some * Revert some * Remove layout css rule * Show toc on homepage * FIx broken link * Add edit uri * Hide bottom part * Hide bottom part * Restructure docs * Show navigation in home * Add Changelog fromating * THighten CHANGELOG.md * Simplify users.md * Simplify models.md * Shorten citing.md * Shorten support.md * Update CHANGELOG.md * Simplify installation.md * Simplify quick-start.md * Updated FullCalculation → Optimization in documentation Fixed mkdocs.yml navigation Fixed broken link in support.md * Fixed solver calls in docs * Move files and restructure * Delete old docs script * Improve docs structure * Imrpove Optimization Modes * Imrpove Optimization Modes * Rewrite the core concepts to be user facing * Reorganize Mathematical Notation * 1. 
Minimal variable names — Changed from words to symbols: - penalty_rate → $c_\phi$ - relative_min → $p_{rel}^{min}$ - flow_hours → $h_f$ - loss → $\dot{c}_{loss}$ - etc. 2. Tabs for conditional constraints — Used === "Tab Name" syntax for: - Bus.md: "Without Excess (Strict)" vs "With Excess (Soft)" - Flow.md: "Standard (No On/Off)" vs "With On/Off" vs "Fixed Profile" - Storage.md: "Fixed Initial" vs "Cyclic" vs "Final Bounds" - LinearConverter.md: "Single Input/Output" vs "Multiple Outputs" vs "COP > 1" vs "Time-Varying" - Effects.md: "Temporal (Operational)" vs "Periodic (Investment)" vs "Total" 3. Corrected Flow constraints — Clarified that: - Without on/off parameters: flow cannot be zero if relative_minimum > 0 - With on/off parameters: flow can be zero (when off) OR within bounds (when on) 4. Cleaner structure — Removed redundant content, focused on essential formulas and examples * The Flow.md now has four tabs for capacity bounds: 1. Fixed Size — Standard bounds without on/off 2. Fixed Size + On/Off — Can be zero when off 3. Variable Size — Investment decision on capacity 4. Variable Size + On/Off — Both investment and on/off, with big-M linearization for the bilinear term $s(t) \cdot P$ * InvestParameters.md: - Story-driven intro with real-world examples - Core concept: "Size as a Variable" - Tabs for: Binary (Fixed Size) | Continuous (Size Range) | Mandatory - Tabs for effects: Fixed | Specific | Retirement | Piecewise - Minimal variable names: $P$, $s_{inv}$, $c_{fix}$, $c_{spec}$, etc. - Cost annualization formula OnOffParameters.md: - Story-driven intro with real-world examples - Core concept: "Binary State" with flow bound modification - Tabs for state transitions: Switch Detection | Startup Costs | Running Costs - Tabs for duration constraints: Min Run Time | Min Off Time | Max Run Time | Total Hours | Max Startups - Minimal variable names: $s(t)$, $s^{on}(t)$, $s^{off}(t)$, $T_{on}^{min}$, etc. 
Piecewise.md: - Story-driven intro with ASCII diagram - Core concept: Linear segments with weighted combinations - Tabs for constraints: Single Piece Active | With Zero Point - Tabs for piece patterns: Continuous (Touching) | Gap (Forbidden Region) | Zero Point - Minimal variable names: $\beta_k$, $\lambda_0$, $\lambda_1$, etc. - Practical examples for heat pumps, boilers, and investment * Make OnOffParameters better * Piecewise.md: - Replaced the useless ASCII diagram with a more informative one showing: - Actual axis labels (input/output) - Numeric values on axes - Two pieces with their connection point labeled - Clear visual of how pieces connect at (50, 45) - Shows the start/end points notation * Add plotly chart * Add custom javascript * Remove charts plugin * Add missing docs file * Fix quick start * Delete model.md * Update citation * Update license.md * Simplify faq, support and troubleshooting.md * Remove old workflow * 1. Renamed OnOffParameters.md → StatusParameters.md 2. Updated all terminology: - on_off_parameters → status_parameters - OnOffParameters → StatusParameters - effects_per_switch_on → effects_per_startup - effects_per_running_hour → effects_per_active_hour - consecutive_on_hours_min → min_uptime - consecutive_on_hours_max → max_uptime - consecutive_off_hours_min → min_downtime - on_hours_min/max → active_hours_min/max - switch_on_max → startup_limit - switch_on/switch_off → startup/shutdown - "on/off" language → "active/inactive" language 3. 
Updated references in Flow.md, LinearConverter.md, and effects-penalty-objective.md * Remove Modeling patterns from docs * Simplify docs * Improve LinearConverter.md * Improve Flow.md * Improve effects-penalty-objective.md * Improve InvestParameters.md * Add durtaion constraints * Update Piecewise stuff * Update Piecewise stuff * Update Piecewise stuff * Combine effects and dimensions into one tab * The dimension examples now correctly show how to assign them to FlowSystem using pd.Index: * Update effects-and-dimensions.md * Update effects-and-dimensions.md * updated all reference tables across all Mathematical Notation pages to be consistent * updated all reference tables across all Mathematical Notation pages to be consistent * updated all reference tables across all Mathematical Notation pages to be consistent --- .gitignore | 4 + CHANGELOG.md | 20 +- README.md | 2 +- docs/getting-started.md | 65 --- docs/home/citing.md | 29 ++ docs/home/installation.md | 91 ++++ docs/home/license.md | 43 ++ docs/home/quick-start.md | 132 ++++++ docs/home/users.md | 27 ++ docs/index.md | 113 +++-- docs/javascripts/plotly-instant.js | 30 ++ docs/roadmap.md | 2 +- docs/stylesheets/extra.css | 115 +---- docs/user-guide/building-models/index.md | 20 + docs/user-guide/core-concepts.md | 263 ++++++----- docs/user-guide/faq.md | 34 ++ docs/user-guide/index.md | 83 ++++ .../mathematical-notation/dimensions.md | 316 ------------- .../effects-and-dimensions.md | 415 ++++++++++++++++++ .../effects-penalty-objective.md | 337 -------------- .../mathematical-notation/elements/Bus.md | 69 +-- .../mathematical-notation/elements/Flow.md | 146 ++++-- .../elements/LinearConverter.md | 159 +++++-- .../mathematical-notation/elements/Storage.md | 149 ++++--- .../features/InvestParameters.md | 363 +++++---------- .../features/OnOffParameters.md | 0 .../features/Piecewise.md | 162 +++++-- .../features/StatusParameters.md | 355 ++++----------- .../user-guide/mathematical-notation/index.md | 160 +++---- 
.../modeling-patterns/bounds-and-states.md | 171 -------- .../modeling-patterns/duration-tracking.md | 164 ------- .../modeling-patterns/index.md | 54 --- .../modeling-patterns/state-transitions.md | 235 ---------- .../mathematical-notation/others.md | 3 - docs/user-guide/optimization/index.md | 195 ++++++++ docs/user-guide/results/index.md | 18 + docs/user-guide/support.md | 23 + docs/user-guide/troubleshooting.md | 61 +++ flixopt/components.py | 35 +- flixopt/effects.py | 25 +- flixopt/elements.py | 24 +- flixopt/features.py | 29 +- flixopt/interface.py | 16 +- mkdocs.yml | 77 +++- pyproject.toml | 1 + scripts/extract_changelog.py | 151 ------- scripts/format_changelog.py | 82 ++++ scripts/gen_ref_pages.py | 2 +- 48 files changed, 2413 insertions(+), 2657 deletions(-) delete mode 100644 docs/getting-started.md create mode 100644 docs/home/citing.md create mode 100644 docs/home/installation.md create mode 100644 docs/home/license.md create mode 100644 docs/home/quick-start.md create mode 100644 docs/home/users.md create mode 100644 docs/javascripts/plotly-instant.js create mode 100644 docs/user-guide/building-models/index.md create mode 100644 docs/user-guide/faq.md create mode 100644 docs/user-guide/index.md delete mode 100644 docs/user-guide/mathematical-notation/dimensions.md create mode 100644 docs/user-guide/mathematical-notation/effects-and-dimensions.md delete mode 100644 docs/user-guide/mathematical-notation/effects-penalty-objective.md delete mode 100644 docs/user-guide/mathematical-notation/features/OnOffParameters.md delete mode 100644 docs/user-guide/mathematical-notation/modeling-patterns/bounds-and-states.md delete mode 100644 docs/user-guide/mathematical-notation/modeling-patterns/duration-tracking.md delete mode 100644 docs/user-guide/mathematical-notation/modeling-patterns/index.md delete mode 100644 docs/user-guide/mathematical-notation/modeling-patterns/state-transitions.md delete mode 100644 docs/user-guide/mathematical-notation/others.md 
create mode 100644 docs/user-guide/optimization/index.md create mode 100644 docs/user-guide/results/index.md create mode 100644 docs/user-guide/support.md create mode 100644 docs/user-guide/troubleshooting.md delete mode 100644 scripts/extract_changelog.py create mode 100644 scripts/format_changelog.py diff --git a/.gitignore b/.gitignore index cc2179b07..169c1a587 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,7 @@ venv/ .DS_Store lib/ temp-plot.html +.cache +site/ +*.egg-info +uv.lock diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e181ee9a..914ef2666 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,7 @@ This contains all commits, PRs, and contributors. Therefore, the Changelog should focus on the user-facing changes. Please remove all irrelevant sections before releasing. -Please keep the format of the changelog consistent with the other releases, so the extraction for mkdocs works. +Please keep the format of the changelog consistent: ## [VERSION] - YYYY-MM-DD --- ## [Template] - ????-??-?? @@ -49,11 +49,11 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp --- -## [Unreleased] - ????-??-?? +Until here --> -**Summary**: Renamed OnOff terminology to Status terminology for better alignment with PyPSA and unit commitment standards. **All deprecated items from v4.x have been removed.** +## [Upcoming] -### ✨ Added +**Summary**: Renamed OnOff terminology to Status terminology for better alignment with PyPSA and unit commitment standards. 
**All deprecated items from v4.x have been removed.** ### 💥 Breaking Changes @@ -212,22 +212,14 @@ A partial backwards compatibility wrapper would be misleading, so we opted for a - Flow parameters: `Q_fu` → use `fuel_flow`, `P_el` → use `electrical_flow`, `Q_th` → use `thermal_flow`, `Q_ab` → use `heat_source_flow` - Efficiency parameters: `eta` → use `thermal_efficiency`, `eta_th` → use `thermal_efficiency`, `eta_el` → use `electrical_efficiency`, `COP` → use `cop` -### 🐛 Fixed - -### 🔒 Security - -### 📦 Dependencies ### 📝 Docs +- Improve documentation from the ground up -### 👷 Development - -### 🚧 Known Issues +This is not yet publicly released! --- -Until here --> - ## [4.3.5] - 2025-11-29 **Summary**: Fix zenodo again diff --git a/README.md b/README.md index 6d049819d..339a40b41 100644 --- a/README.md +++ b/README.md @@ -96,7 +96,7 @@ boiler = fx.Boiler("Boiler", eta=0.9, ...) ### Key Features **Multi-criteria optimization:** Model costs, emissions, resource use - any custom metric. Optimize single objectives or use weighted combinations and ε-constraints. -→ [Effects documentation](https://flixopt.github.io/flixopt/latest/user-guide/mathematical-notation/effects-penalty-objective/) +→ [Effects documentation](https://flixopt.github.io/flixopt/latest/user-guide/mathematical-notation/effects-and-dimensions/) **Performance at any scale:** Choose optimization modes without changing your model - Optimization, SegmentedOptimization, or ClusteredOptimization (using [TSAM](https://github.com/FZJ-IEK3-VSA/tsam)). → [Optimization modes](https://flixopt.github.io/flixopt/latest/api-reference/optimization/) diff --git a/docs/getting-started.md b/docs/getting-started.md deleted file mode 100644 index 0cdd2a5a7..000000000 --- a/docs/getting-started.md +++ /dev/null @@ -1,65 +0,0 @@ -# Getting Started with FlixOpt - -This guide will help you install FlixOpt, understand its basic concepts, and run your first optimization model. 
- -## Installation - -### Basic Installation - -Install FlixOpt directly into your environment using pip: - -```bash -pip install flixopt -``` - -This provides the core functionality with the HiGHS solver included. - -### Full Installation - -For all features including interactive network visualizations and time series aggregation: - -```bash -pip install "flixopt[full]" -``` - -## Logging - -FlixOpt uses Python's standard logging module with optional colored output via [colorlog](https://github.com/borntyping/python-colorlog). Logging is silent by default but can be easily configured. - -```python -from flixopt import CONFIG - -# Enable colored console logging -CONFIG.Logging.enable_console('INFO') - -# Or use a preset configuration for exploring -CONFIG.exploring() -``` - -For advanced logging configuration, you can use Python's standard logging module directly: - -```python -import logging -logging.basicConfig(level=logging.DEBUG) -``` - -For more details on logging configuration, see the [`CONFIG.Logging`][flixopt.config.CONFIG.Logging] documentation. - -## Basic Workflow - -Working with FlixOpt follows a general pattern: - -1. **Create a [`FlowSystem`][flixopt.flow_system.FlowSystem]** with a time series -2. **Define [`Effects`][flixopt.effects.Effect]** (costs, emissions, etc.) -3. **Define [`Buses`][flixopt.elements.Bus]** as connection points in your system -4. **Add [`Components`][flixopt.components]** like converters, storage, sources/sinks with their Flows -5. **Run [`Optimizations`][flixopt.optimization]** to optimize your system -6. 
**Analyze [`Results`][flixopt.results]** using built-in or external visualization tools - -## Next Steps - -Now that you've installed FlixOpt and understand the basic workflow, you can: - -- Learn about the [core concepts of flixopt](user-guide/core-concepts.md) -- Explore some [examples](examples/index.md) -- Check the [API reference](api-reference/index.md) for detailed documentation diff --git a/docs/home/citing.md b/docs/home/citing.md new file mode 100644 index 000000000..6fd1a6020 --- /dev/null +++ b/docs/home/citing.md @@ -0,0 +1,29 @@ +# Citing flixOpt + +If you use flixOpt in your research, please cite it. + +## Citation + +When referencing flixOpt in academic publications, please use look here: [flixopt citation](https://zenodo.org/records/17756895) + +## Publications + +If you've published research using flixOpt, please let us know! We'd love to feature it here. + +### List of Publications + +*Coming soon: A list of academic publications that have used flixOpt* + +## Contributing Back + +If flixOpt helped your research: + +- Share your model as an example +- Report issues or contribute code +- Improve documentation + +See the [Contributing Guide](../contribute.md). + +## License + +flixOpt is released under the MIT License. See [License](license.md) for details. diff --git a/docs/home/installation.md b/docs/home/installation.md new file mode 100644 index 000000000..afb24172b --- /dev/null +++ b/docs/home/installation.md @@ -0,0 +1,91 @@ +# Installation + +This guide covers installing flixOpt and its dependencies. + + +## Basic Installation + +Install flixOpt directly into your environment using pip: + +```bash +pip install flixopt +``` + +This provides the core functionality with the HiGHS solver included. 
+ +## Full Installation + +For all features including interactive network visualizations and time series aggregation: + +```bash +pip install "flixopt[full]" +``` + +## Development Installation + +If you want to contribute to flixOpt or work with the latest development version: + +```bash +git clone https://github.com/flixOpt/flixopt.git +cd flixopt +pip install -e ".[full,dev,docs]" +``` + +## Solver Installation + +### HiGHS (Included) + +The HiGHS solver is included with flixOpt and works out of the box. No additional installation is required. + +### Gurobi (Optional) + +For academic use, Gurobi offers free licenses: + +1. Register for an academic license at [gurobi.com](https://www.gurobi.com/academia/) +2. Install Gurobi: + ```bash + pip install gurobipy + ``` +3. Activate your license following Gurobi's instructions + +## Verification + +Verify your installation by running: + +```python +import flixopt +print(flixopt.__version__) +``` + +## Logging Configuration + +flixOpt uses Python's standard logging module with optional colored output via [colorlog](https://github.com/borntyping/python-colorlog). Logging is silent by default but can be easily configured: + +```python +from flixopt import CONFIG + +# Enable colored console logging +CONFIG.Logging.enable_console('INFO') + +# Or use a preset configuration for exploring +CONFIG.exploring() +``` + +Since flixOpt uses Python's standard logging, you can also configure it directly: + +```python +import logging + +# Get the flixopt logger and configure it +logger = logging.getLogger('flixopt') +logger.setLevel(logging.DEBUG) +logger.addHandler(logging.StreamHandler()) +``` + +For more details on logging configuration, see the [`CONFIG.Logging`][flixopt.config.CONFIG.Logging] documentation. 
+ +## Next Steps + +- Follow the [Quick Start](quick-start.md) guide +- Explore the [Minimal Example](../examples/00-Minimal Example.md) +- Read about [Core Concepts](../user-guide/core-concepts.md) diff --git a/docs/home/license.md b/docs/home/license.md new file mode 100644 index 000000000..d00755a0b --- /dev/null +++ b/docs/home/license.md @@ -0,0 +1,43 @@ +# License + +flixOpt is released under the MIT License. + +## MIT License + +``` +MIT License + +Copyright (c) 2022 Chair of Building Energy Systems and Heat Supply - TU Dresden + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+``` + +## What This Means + +The MIT License is a permissive open-source license that allows you to: + +✅ **Use** flixOpt for any purpose, including commercial applications +✅ **Modify** the source code to fit your needs +✅ **Distribute** copies of flixOpt +✅ **Sublicense** under different terms +✅ **Use privately** without making your modifications public + +## Contributing + +By contributing to flixOpt, you agree that your contributions will be licensed under the MIT License. See our [Contributing Guide](../contribute.md) for more information. diff --git a/docs/home/quick-start.md b/docs/home/quick-start.md new file mode 100644 index 000000000..b0bdef7da --- /dev/null +++ b/docs/home/quick-start.md @@ -0,0 +1,132 @@ +# Quick Start + +Get up and running with flixOpt in 5 minutes! This guide walks you through creating and solving your first energy system optimization. + +## Installation + +First, install flixOpt: + +```bash +pip install "flixopt[full]" +``` + +## Your First Model + +Let's create a simple energy system with a generator, demand, and battery storage. + +### 1. Import flixOpt + +```python +import flixopt as fx +import numpy as np +import pandas as pd +``` + +### 2. Define your time horizon + +```python +# 24h period with hourly timesteps +timesteps = pd.date_range('2024-01-01', periods=24, freq='h') +``` + +### 2. Set Up the Flow System + +```python +# Create the flow system +flow_system = fx.FlowSystem(timesteps) + +# Define an effect to minimize (costs) +costs = fx.Effect('costs', 'EUR', 'Minimize total system costs', is_objective=True) +flow_system.add_elements(costs) +``` + +### 4. 
Add Components + +```python +# Electricity bus +electricity_bus = fx.Bus('electricity') + +# Solar generator with time-varying output +solar_profile = np.array([0, 0, 0, 0, 0, 0, 0.2, 0.5, 0.8, 1.0, + 1.0, 0.9, 0.8, 0.7, 0.5, 0.3, 0.1, 0, + 0, 0, 0, 0, 0, 0]) + +solar = fx.Source( + 'solar', + outputs=[fx.Flow( + 'power', + bus='electricity', + size=100, # 100 kW capacity + relative_maximum=solar_profile + ) +]) + +# Demand +demand_profile = np.array([30, 25, 20, 20, 25, 35, 50, 70, 80, 75, + 70, 65, 60, 65, 70, 80, 90, 95, 85, 70, + 60, 50, 40, 35]) + +demand = fx.Sink('demand', inputs=[ + fx.Flow('consumption', + bus='electricity', + size=1, + fixed_relative_profile=demand_profile) +]) + +# Battery storage +battery = fx.Storage( + 'battery', + charging=fx.Flow('charge', bus='electricity', size=50), + discharging=fx.Flow('discharge', bus='electricity', size=50), + capacity_in_flow_hours=100, # 100 kWh capacity + initial_charge_state=50, # Start at 50% + eta_charge=0.95, + eta_discharge=0.95, +) + +# Add all components to system +flow_system.add_elements(solar, demand, battery, electricity_bus) +``` + +### 5. Run Optimization + +```python +# Create and run optimization +optimization = fx.Optimization('solar_battery_optimization', flow_system) +optimization.solve(fx.solvers.HighsSolver()) +``` + +### 6. Save Results + +```python +# This includes the modeled FlowSystem. SO you can restore both results and inputs +optimization.results.to_file() +``` + +## What's Next? + +Now that you've created your first model, you can: + +- **Learn the concepts** - Read the [Core Concepts](../user-guide/core-concepts.md) guide +- **Explore examples** - Check out more [Examples](../examples/index.md) +- **Deep dive** - Study the [Mathematical Formulation](../user-guide/mathematical-notation/index.md) +- **Build complex models** - Use [Recipes](../user-guide/recipes/index.md) for common patterns + +## Common Workflow + +Most flixOpt projects follow this pattern: + +1. 
**Define time series** - Set up the temporal resolution +2. **Create flow system** - Initialize with time series and effects +3. **Add buses** - Define connection points +4. **Add components** - Create generators, storage, converters, loads +5. **Run optimization** - Solve the optimization +6. **Save Results** - For later analysis. Or only extract needed data + +## Tips + +- Start simple and add complexity incrementally +- Use meaningful names for components and flows +- Check solver status before analyzing results +- Enable logging during development for debugging +- Visualize results to verify model behavior diff --git a/docs/home/users.md b/docs/home/users.md new file mode 100644 index 000000000..d27f99576 --- /dev/null +++ b/docs/home/users.md @@ -0,0 +1,27 @@ +# Who Uses flixOpt? + +flixOpt is developed and used primarily in academic research for energy system optimization. + +## Primary Users + +- **Researchers** - Energy system modeling and optimization studies +- **Students** - Master's and PhD thesis projects +- **Engineers** - Feasibility studies and system planning + +## Typical Applications + +- Dispatch optimization with renewable integration +- Capacity expansion planning +- Battery and thermal storage sizing +- District heating network optimization +- Combined heat and power (CHP) systems +- Multi-energy systems and sector coupling + +## Get Involved + +Using flixOpt in your research? Consider: + +- [Citing flixOpt](citing.md) in your publications +- Sharing your model as an example +- Contributing to the codebase +- Joining [discussions](https://github.com/flixOpt/flixopt/discussions) diff --git a/docs/index.md b/docs/index.md index 3467bb394..70fd15bf4 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,8 +1,5 @@ --- title: Home -hide: - - navigation - - toc ---
@@ -16,7 +13,7 @@ hide:

Model, optimize, and analyze complex energy systems with a powerful Python framework designed for flexibility and performance.

- 🚀 Get Started + 🚀 Get Started 💡 View Examples ⭐ GitHub

@@ -25,36 +22,44 @@ hide: ## :material-map-marker-path: Quick Navigation -" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 13 }, { "cell_type": "markdown", diff --git a/flixopt/color_processing.py b/flixopt/color_processing.py index f6e9a3b9f..62d8a9542 100644 --- a/flixopt/color_processing.py +++ b/flixopt/color_processing.py @@ -109,6 +109,59 @@ def _rgb_string_to_hex(color: str) -> str: return color +def color_to_rgba(color: str | None, alpha: float = 1.0) -> str: + """Convert any valid color to RGBA string format. + + Handles hex colors (with or without #), named colors, and rgb/rgba strings. + + Args: + color: Color in any valid format (hex '#FF0000' or 'FF0000', + named 'red', rgb 'rgb(255,0,0)', rgba 'rgba(255,0,0,1)'). + alpha: Alpha/opacity value between 0.0 and 1.0. + + Returns: + Color in RGBA format 'rgba(R, G, B, A)'. + + Examples: + >>> color_to_rgba('#FF0000') + 'rgba(255, 0, 0, 1.0)' + >>> color_to_rgba('FF0000') + 'rgba(255, 0, 0, 1.0)' + >>> color_to_rgba('red', 0.5) + 'rgba(255, 0, 0, 0.5)' + >>> color_to_rgba('forestgreen', 0.4) + 'rgba(34, 139, 34, 0.4)' + >>> color_to_rgba(None) + 'rgba(200, 200, 200, 1.0)' + """ + if not color: + return f'rgba(200, 200, 200, {alpha})' + + try: + # Use matplotlib's robust color conversion (handles hex, named, etc.) 
+ rgba = mcolors.to_rgba(color) + except ValueError: + # Try adding # prefix for bare hex colors (e.g., 'FF0000' -> '#FF0000') + if len(color) == 6 and all(c in '0123456789ABCDEFabcdef' for c in color): + try: + rgba = mcolors.to_rgba(f'#{color}') + except ValueError: + return f'rgba(200, 200, 200, {alpha})' + else: + return f'rgba(200, 200, 200, {alpha})' + except TypeError: + return f'rgba(200, 200, 200, {alpha})' + + r = int(round(rgba[0] * 255)) + g = int(round(rgba[1] * 255)) + b = int(round(rgba[2] * 255)) + return f'rgba({r}, {g}, {b}, {alpha})' + + +# Alias for backwards compatibility +hex_to_rgba = color_to_rgba + + def process_colors( colors: None | str | list[str] | dict[str, str], labels: list[str], diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 763aeac9f..fb37fb002 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -220,9 +220,15 @@ def __init__( # Statistics accessor cache - lazily initialized, invalidated on new solution self._statistics: StatisticsAccessor | None = None + # Topology accessor cache - lazily initialized, invalidated on structure change + self._topology: TopologyAccessor | None = None + # Carrier container - local carriers override CONFIG.Carriers self._carriers: CarrierContainer = CarrierContainer() + # Cached flow→carrier mapping (built lazily after connect_and_transform) + self._flow_carriers: dict[str, str] | None = None + # Use properties to validate and store scenario dimension settings self.scenario_independent_sizes = scenario_independent_sizes self.scenario_independent_flow_rates = scenario_independent_flow_rates @@ -1187,6 +1193,31 @@ def carriers(self) -> CarrierContainer: """Carriers registered on this FlowSystem.""" return self._carriers + @property + def flow_carriers(self) -> dict[str, str]: + """Cached mapping of flow labels to carrier names. + + Returns: + Dict mapping flow label to carrier name (lowercase). + Flows without a carrier are not included. 
+ + Raises: + RuntimeError: If FlowSystem is not connected_and_transformed. + """ + if not self.connected_and_transformed: + raise RuntimeError( + 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.' + ) + + if self._flow_carriers is None: + self._flow_carriers = {} + for flow_label, flow in self.flows.items(): + bus = self.buses.get(flow.bus) + if bus and bus.carrier: + self._flow_carriers[flow_label] = bus.carrier.lower() + + return self._flow_carriers + def create_model(self, normalize_weights: bool = True) -> FlowSystemModel: """ Create a linopy model from the FlowSystem. @@ -1341,7 +1372,8 @@ def _invalidate_model(self) -> None: """Invalidate the model and element submodels when structure changes. This clears the model, resets the ``connected_and_transformed`` flag, - and clears all element submodels and variable/constraint names. + clears all element submodels and variable/constraint names, and invalidates + the topology accessor cache. Called internally by :meth:`add_elements`, :meth:`add_carriers`, :meth:`reset`, and :meth:`invalidate`. @@ -1352,6 +1384,7 @@ def _invalidate_model(self) -> None: """ self.model = None self._connected_and_transformed = False + self._topology = None # Invalidate topology accessor (and its cached colors) for element in self.values(): element.submodel = None element._variable_names = [] @@ -1509,11 +1542,12 @@ def topology(self) -> TopologyAccessor: """ Access network topology inspection and visualization methods. - This property returns a TopologyAccessor that provides methods to inspect - the network structure and visualize it. + This property returns a cached TopologyAccessor that provides methods to inspect + the network structure and visualize it. The accessor is invalidated when the + FlowSystem structure changes (via reset() or invalidate()). Returns: - A TopologyAccessor instance. + A cached TopologyAccessor instance. 
Examples: Visualize the network: @@ -1531,7 +1565,9 @@ def topology(self) -> TopologyAccessor: >>> nodes, edges = flow_system.topology.infos() """ - return TopologyAccessor(self) + if self._topology is None: + self._topology = TopologyAccessor(self) + return self._topology def plot_network( self, diff --git a/flixopt/statistics_accessor.py b/flixopt/statistics_accessor.py index 471cfb5d7..f1c02c1b9 100644 --- a/flixopt/statistics_accessor.py +++ b/flixopt/statistics_accessor.py @@ -30,7 +30,7 @@ import plotly.graph_objects as go import xarray as xr -from .color_processing import ColorType, process_colors +from .color_processing import ColorType, hex_to_rgba, process_colors from .config import CONFIG if TYPE_CHECKING: @@ -49,8 +49,8 @@ # Sankey select types with Literal keys for IDE autocomplete -FlowSankeySelect = dict[Literal['flow', 'bus', 'component', 'time', 'period', 'scenario'], Any] -"""Select options for flow-based sankey: flow, bus, component, time, period, scenario.""" +FlowSankeySelect = dict[Literal['flow', 'bus', 'component', 'carrier', 'time', 'period', 'scenario'], Any] +"""Select options for flow-based sankey: flow, bus, component, carrier, time, period, scenario.""" EffectsSankeySelect = dict[Literal['effect', 'component', 'contributor', 'period', 'scenario'], Any] """Select options for effects sankey: effect, component, contributor, period, scenario.""" @@ -262,6 +262,26 @@ def _apply_selection(ds: xr.Dataset, select: SelectType | None, drop: bool = Tru return ds +def _filter_by_carrier(ds: xr.Dataset, carrier: str | list[str] | None) -> xr.Dataset: + """Filter dataset variables by carrier attribute. + + Args: + ds: Dataset with variables that have 'carrier' attributes. + carrier: Carrier name(s) to keep. None means no filtering. + + Returns: + Dataset containing only variables matching the carrier(s). 
+ """ + if carrier is None: + return ds + + carriers = [carrier] if isinstance(carrier, str) else carrier + carriers = [c.lower() for c in carriers] + + matching_vars = [var for var in ds.data_vars if ds[var].attrs.get('carrier', '').lower() in carriers] + return ds[matching_vars] if matching_vars else xr.Dataset() + + def _resolve_facets( ds: xr.Dataset, facet_col: str | None, @@ -400,6 +420,39 @@ def _require_solution(self) -> xr.Dataset: raise RuntimeError('FlowSystem has no solution. Run optimize() or solve() first.') return self._fs.solution + @property + def carrier_colors(self) -> dict[str, str]: + """Cached mapping of carrier name to color. + + Delegates to topology accessor for centralized color caching. + + Returns: + Dict mapping carrier names (lowercase) to hex color strings. + """ + return self._fs.topology.carrier_colors + + @property + def component_colors(self) -> dict[str, str]: + """Cached mapping of component label to color. + + Delegates to topology accessor for centralized color caching. + + Returns: + Dict mapping component labels to hex color strings. + """ + return self._fs.topology.component_colors + + @property + def bus_colors(self) -> dict[str, str]: + """Cached mapping of bus label to color (from carrier). + + Delegates to topology accessor for centralized color caching. + + Returns: + Dict mapping bus labels to hex color strings. + """ + return self._fs.topology.bus_colors + @property def plot(self) -> StatisticsPlotAccessor: """Access plotting methods for statistics. @@ -417,20 +470,43 @@ def plot(self) -> StatisticsPlotAccessor: @property def flow_rates(self) -> xr.Dataset: - """All flow rates as a Dataset with flow labels as variable names.""" + """All flow rates as a Dataset with flow labels as variable names. + + Each variable has a 'carrier' attribute indicating the carrier type + of the bus it connects to (e.g., 'heat', 'electricity', 'gas'). 
+ """ self._require_solution() if self._flow_rates is None: flow_rate_vars = [v for v in self._fs.solution.data_vars if v.endswith('|flow_rate')] - self._flow_rates = xr.Dataset({v.replace('|flow_rate', ''): self._fs.solution[v] for v in flow_rate_vars}) + flow_carriers = self._fs.flow_carriers # Cached lookup + data_vars = {} + for v in flow_rate_vars: + flow_label = v.replace('|flow_rate', '') + da = self._fs.solution[v].copy() + # Add carrier as attribute (from cached mapping) + da.attrs['carrier'] = flow_carriers.get(flow_label) + data_vars[flow_label] = da + self._flow_rates = xr.Dataset(data_vars) return self._flow_rates @property def flow_hours(self) -> xr.Dataset: - """All flow hours (energy) as a Dataset with flow labels as variable names.""" + """All flow hours (energy) as a Dataset with flow labels as variable names. + + Each variable has a 'carrier' attribute indicating the carrier type + of the bus it connects to (e.g., 'heat', 'electricity', 'gas'). + """ self._require_solution() if self._flow_hours is None: hours = self._fs.hours_per_timestep - self._flow_hours = self.flow_rates * hours + flow_rates = self.flow_rates + # Multiply and preserve carrier attributes + data_vars = {} + for var in flow_rates.data_vars: + da = flow_rates[var] * hours + da.attrs['carrier'] = flow_rates[var].attrs.get('carrier') + data_vars[var] = da + self._flow_hours = xr.Dataset(data_vars) return self._flow_hours @property @@ -791,19 +867,20 @@ def __init__(self, plot_accessor: StatisticsPlotAccessor) -> None: def _extract_flow_filters( self, select: FlowSankeySelect | None - ) -> tuple[SelectType | None, list[str] | None, list[str] | None, list[str] | None]: + ) -> tuple[SelectType | None, list[str] | None, list[str] | None, list[str] | None, list[str] | None]: """Extract special filters from select dict. Returns: - Tuple of (xarray_select, flow_filter, bus_filter, component_filter). + Tuple of (xarray_select, flow_filter, bus_filter, component_filter, carrier_filter). 
""" if select is None: - return None, None, None, None + return None, None, None, None, None select = dict(select) # Copy to avoid mutating original flow_filter = select.pop('flow', None) bus_filter = select.pop('bus', None) component_filter = select.pop('component', None) + carrier_filter = select.pop('carrier', None) # Normalize to lists if isinstance(flow_filter, str): @@ -812,8 +889,10 @@ def _extract_flow_filters( bus_filter = [bus_filter] if isinstance(component_filter, str): component_filter = [component_filter] + if isinstance(carrier_filter, str): + carrier_filter = [carrier_filter] - return select if select else None, flow_filter, bus_filter, component_filter + return select if select else None, flow_filter, bus_filter, component_filter, carrier_filter def _build_flow_links( self, @@ -821,11 +900,19 @@ def _build_flow_links( flow_filter: list[str] | None = None, bus_filter: list[str] | None = None, component_filter: list[str] | None = None, + carrier_filter: list[str] | None = None, min_value: float = 1e-6, ) -> tuple[set[str], dict[str, list]]: """Build Sankey nodes and links from flow data.""" nodes: set[str] = set() - links: dict[str, list] = {'source': [], 'target': [], 'value': [], 'label': []} + links: dict[str, list] = {'source': [], 'target': [], 'value': [], 'label': [], 'carrier': []} + + # Normalize carrier filter to lowercase + if carrier_filter is not None: + carrier_filter = [c.lower() for c in carrier_filter] + + # Use flow_rates to get carrier names from xarray attributes (already computed) + flow_rates = self._stats.flow_rates for flow in self._fs.flows.values(): label = flow.label_full @@ -839,6 +926,13 @@ def _build_flow_links( comp_label = flow.component if bus_filter is not None and bus_label not in bus_filter: continue + + # Get carrier name from flow_rates xarray attribute (efficient lookup) + carrier_name = flow_rates[label].attrs.get('carrier') if label in flow_rates else None + + if carrier_filter is not None: + if carrier_name 
is None or carrier_name.lower() not in carrier_filter: + continue if component_filter is not None and comp_label not in component_filter: continue @@ -857,6 +951,7 @@ def _build_flow_links( links['target'].append(target) links['value'].append(abs(value)) links['label'].append(label) + links['carrier'].append(carrier_name) return nodes, links @@ -872,8 +967,20 @@ def _create_figure( node_list = list(nodes) node_indices = {n: i for i, n in enumerate(node_list)} - color_map = process_colors(colors, node_list) - node_colors = [color_map[node] for node in node_list] + # Build node colors: buses use carrier colors, components use process_colors + node_colors = self._get_node_colors(node_list, colors) + + # Build link colors from carrier colors (subtle/semi-transparent) + link_colors = self._get_link_colors(links.get('carrier', [])) + + link_dict: dict[str, Any] = dict( + source=[node_indices[s] for s in links['source']], + target=[node_indices[t] for t in links['target']], + value=links['value'], + label=links['label'], + ) + if link_colors: + link_dict['color'] = link_colors fig = go.Figure( data=[ @@ -881,29 +988,60 @@ def _create_figure( node=dict( pad=15, thickness=20, line=dict(color='black', width=0.5), label=node_list, color=node_colors ), - link=dict( - source=[node_indices[s] for s in links['source']], - target=[node_indices[t] for t in links['target']], - value=links['value'], - label=links['label'], - ), + link=link_dict, ) ] ) fig.update_layout(title=title, **plotly_kwargs) return fig + def _get_node_colors(self, node_list: list[str], colors: ColorType | None) -> list[str]: + """Get colors for nodes: buses use cached bus_colors, components use process_colors.""" + # Get fallback colors from process_colors + fallback_colors = process_colors(colors, node_list) + + # Use cached bus colors for efficiency + bus_colors = self._stats.bus_colors + + node_colors = [] + for node in node_list: + # Check if node is a bus with a cached color + if node in bus_colors: + 
node_colors.append(bus_colors[node]) + else: + # Fall back to process_colors + node_colors.append(fallback_colors[node]) + + return node_colors + + def _get_link_colors(self, carriers: list[str | None]) -> list[str]: + """Get subtle/semi-transparent colors for links based on their carriers.""" + if not carriers: + return [] + + # Use cached carrier colors for efficiency + carrier_colors = self._stats.carrier_colors + + link_colors = [] + for carrier_name in carriers: + hex_color = carrier_colors.get(carrier_name.lower()) if carrier_name else None + link_colors.append(hex_to_rgba(hex_color, alpha=0.4) if hex_color else hex_to_rgba('', alpha=0.4)) + + return link_colors + def _finalize(self, fig: go.Figure, links: dict[str, list], show: bool | None) -> PlotResult: """Create PlotResult and optionally show figure.""" - sankey_ds = xr.Dataset( - {'value': ('link', links['value'])}, - coords={ - 'link': range(len(links['value'])), - 'source': ('link', links['source']), - 'target': ('link', links['target']), - 'label': ('link', links['label']), - }, - ) + coords: dict[str, Any] = { + 'link': range(len(links['value'])), + 'source': ('link', links['source']), + 'target': ('link', links['target']), + 'label': ('link', links['label']), + } + # Add carrier if present + if 'carrier' in links: + coords['carrier'] = ('link', links['carrier']) + + sankey_ds = xr.Dataset({'value': ('link', links['value'])}, coords=coords) if show is None: show = CONFIG.Plotting.default_show @@ -939,7 +1077,7 @@ def flows( PlotResult with Sankey flow data and figure. 
""" self._stats._require_solution() - xr_select, flow_filter, bus_filter, component_filter = self._extract_flow_filters(select) + xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select) ds = self._stats.flow_hours.copy() @@ -959,7 +1097,7 @@ def flows( if dim in ds.dims: ds = ds.sum(dim=dim) - nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter) + nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter) fig = self._create_figure(nodes, links, colors, 'Energy Flow', **plotly_kwargs) return self._finalize(fig, links, show) @@ -989,7 +1127,7 @@ def sizes( PlotResult with Sankey size data and figure. """ self._stats._require_solution() - xr_select, flow_filter, bus_filter, component_filter = self._extract_flow_filters(select) + xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select) ds = self._stats.sizes.copy() ds = _apply_selection(ds, xr_select) @@ -1004,7 +1142,7 @@ def sizes( valid_labels = [lbl for lbl in ds.data_vars if float(ds[lbl].max()) < max_size] ds = ds[valid_labels] - nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter) + nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter) fig = self._create_figure(nodes, links, colors, 'Investment Sizes (Capacities)', **plotly_kwargs) return self._finalize(fig, links, show) @@ -1032,7 +1170,7 @@ def peak_flow( PlotResult with Sankey peak flow data and figure. 
""" self._stats._require_solution() - xr_select, flow_filter, bus_filter, component_filter = self._extract_flow_filters(select) + xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select) ds = self._stats.flow_rates.copy() ds = _apply_selection(ds, xr_select) @@ -1042,7 +1180,7 @@ def peak_flow( if dim in ds.dims: ds = ds.max(dim=dim) - nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter) + nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter) fig = self._create_figure(nodes, links, colors, 'Peak Flow Rates', **plotly_kwargs) return self._finalize(fig, links, show) @@ -1172,8 +1310,8 @@ def sankey(self) -> SankeyPlotAccessor: def _get_color_map_for_balance(self, node: str, flow_labels: list[str]) -> dict[str, str]: """Build color map for balance plot. - - Bus balance: colors from component.color - - Component balance: colors from flow's carrier + - Bus balance: colors from component.color (using cached component_colors) + - Component balance: colors from flow's carrier (using cached carrier_colors) Raises: RuntimeError: If FlowSystem is not connected_and_transformed. 
@@ -1187,12 +1325,20 @@ def _get_color_map_for_balance(self, node: str, flow_labels: list[str]) -> dict[ color_map = {} uncolored = [] + # Get cached colors for efficient lookup + carrier_colors = self._stats.carrier_colors + component_colors = self._stats.component_colors + flow_rates = self._stats.flow_rates + for label in flow_labels: if is_bus: - color = self._fs.components[self._fs.flows[label].component].color + # Use cached component colors + comp_label = self._fs.flows[label].component + color = component_colors.get(comp_label) else: - carrier = self._fs.get_carrier(label) # get_carrier accepts flow labels - color = carrier.color if carrier else None + # Use carrier name from xarray attribute (already computed) + cached colors + carrier_name = flow_rates[label].attrs.get('carrier') if label in flow_rates else None + color = carrier_colors.get(carrier_name) if carrier_name else None if color: color_map[label] = color @@ -1323,6 +1469,119 @@ def balance( return PlotResult(data=ds, figure=fig) + def carrier_balance( + self, + carrier: str, + *, + select: SelectType | None = None, + include: FilterType | None = None, + exclude: FilterType | None = None, + unit: Literal['flow_rate', 'flow_hours'] = 'flow_rate', + colors: ColorType | None = None, + facet_col: str | None = 'period', + facet_row: str | None = 'scenario', + show: bool | None = None, + **plotly_kwargs: Any, + ) -> PlotResult: + """Plot carrier-level balance showing all flows of a carrier type. + + Shows production (positive) and consumption (negative) of a carrier + across all buses of that carrier type in the system. + + Args: + carrier: Carrier name (e.g., 'heat', 'electricity', 'gas'). + select: xarray-style selection dict. + include: Only include flows containing these substrings. + exclude: Exclude flows containing these substrings. + unit: 'flow_rate' (power) or 'flow_hours' (energy). + colors: Color specification (colorscale name, color list, or label-to-color dict). 
+ facet_col: Dimension for column facets. + facet_row: Dimension for row facets. + show: Whether to display the plot. + + Returns: + PlotResult with .data and .figure. + + Examples: + >>> fs.statistics.plot.carrier_balance('heat') + >>> fs.statistics.plot.carrier_balance('electricity', unit='flow_hours') + + Notes: + - Inputs to carrier buses (from sources/converters) are shown as positive + - Outputs from carrier buses (to sinks/converters) are shown as negative + - Internal transfers between buses of the same carrier appear on both sides + """ + self._stats._require_solution() + carrier = carrier.lower() + + # Find all buses with this carrier + carrier_buses = [bus for bus in self._fs.buses.values() if bus.carrier == carrier] + if not carrier_buses: + raise KeyError(f"No buses found with carrier '{carrier}'") + + # Collect all flows connected to these buses + input_labels: list[str] = [] # Inputs to buses = production + output_labels: list[str] = [] # Outputs from buses = consumption + + for bus in carrier_buses: + for flow in bus.inputs: + input_labels.append(flow.label_full) + for flow in bus.outputs: + output_labels.append(flow.label_full) + + all_labels = input_labels + output_labels + filtered_labels = _filter_by_pattern(all_labels, include, exclude) + if not filtered_labels: + logger.warning(f'No flows remaining after filtering for carrier {carrier}') + return PlotResult(data=xr.Dataset(), figure=go.Figure()) + + # Get data from statistics + if unit == 'flow_rate': + ds = self._stats.flow_rates[[lbl for lbl in filtered_labels if lbl in self._stats.flow_rates]] + else: + ds = self._stats.flow_hours[[lbl for lbl in filtered_labels if lbl in self._stats.flow_hours]] + + # Negate outputs (consumption) - opposite convention from bus balance + for label in output_labels: + if label in ds: + ds[label] = -ds[label] + + ds = _apply_selection(ds, select) + actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row) + + # Use cached component colors 
for flows + if colors is None: + component_colors = self._stats.component_colors + color_map = {} + uncolored = [] + for label in ds.data_vars: + flow = self._fs.flows.get(label) + if flow: + color = component_colors.get(flow.component) + if color: + color_map[label] = color + continue + uncolored.append(label) + if uncolored: + color_map.update(process_colors(CONFIG.Plotting.default_qualitative_colorscale, uncolored)) + colors = color_map + + fig = _create_stacked_bar( + ds, + colors=colors, + title=f'{carrier.capitalize()} Balance ({unit})', + facet_col=actual_facet_col, + facet_row=actual_facet_row, + **plotly_kwargs, + ) + + if show is None: + show = CONFIG.Plotting.default_show + if show: + fig.show() + + return PlotResult(data=ds, figure=fig) + def heatmap( self, variables: str | list[str], diff --git a/flixopt/topology_accessor.py b/flixopt/topology_accessor.py index ca61a7a23..191806e6c 100644 --- a/flixopt/topology_accessor.py +++ b/flixopt/topology_accessor.py @@ -15,7 +15,7 @@ import plotly.graph_objects as go -from .color_processing import ColorType, process_colors +from .color_processing import ColorType, hex_to_rgba, process_colors from .config import CONFIG, DEPRECATION_REMOVAL_VERSION if TYPE_CHECKING: @@ -67,13 +67,15 @@ def _plot_network( ) for edge in edge_infos.values(): + # Use carrier color if available, otherwise default gray + edge_color = edge.get('carrier_color', '#222831') or '#222831' net.add_edge( edge['start'], edge['end'], label=edge['label'], title=edge['infos'].replace(')', '\n)'), font={'color': '#4D4D4D', 'size': 14}, - color='#222831', + color=edge_color, ) net.barnes_hut(central_gravity=0.8, spring_length=50, spring_strength=0.05, gravity=-10000) @@ -138,6 +140,67 @@ def __init__(self, flow_system: FlowSystem) -> None: """ self._fs = flow_system + # Cached color mappings (lazily initialized) + self._carrier_colors: dict[str, str] | None = None + self._component_colors: dict[str, str] | None = None + self._bus_colors: dict[str, 
str] | None = None + + @property + def carrier_colors(self) -> dict[str, str]: + """Cached mapping of carrier name to hex color. + + Returns: + Dict mapping carrier names (lowercase) to hex color strings. + Only carriers with a color defined are included. + + Examples: + >>> fs.topology.carrier_colors + {'electricity': '#FECB52', 'heat': '#D62728', 'gas': '#1F77B4'} + """ + if self._carrier_colors is None: + self._carrier_colors = {name: carrier.color for name, carrier in self._fs.carriers.items() if carrier.color} + return self._carrier_colors + + @property + def component_colors(self) -> dict[str, str]: + """Cached mapping of component label to hex color. + + Returns: + Dict mapping component labels to hex color strings. + Only components with a color defined are included. + + Examples: + >>> fs.topology.component_colors + {'Boiler': '#1f77b4', 'CHP': '#ff7f0e', 'HeatPump': '#2ca02c'} + """ + if self._component_colors is None: + self._component_colors = {label: comp.color for label, comp in self._fs.components.items() if comp.color} + return self._component_colors + + @property + def bus_colors(self) -> dict[str, str]: + """Cached mapping of bus label to hex color (from carrier). + + Bus colors are derived from their associated carrier's color. + + Returns: + Dict mapping bus labels to hex color strings. + Only buses with a carrier that has a color defined are included. + + Examples: + >>> fs.topology.bus_colors + {'ElectricityBus': '#FECB52', 'HeatBus': '#D62728'} + """ + if self._bus_colors is None: + carrier_colors = self.carrier_colors + self._bus_colors = {} + for label, bus in self._fs.buses.items(): + if bus.carrier: + color = carrier_colors.get(bus.carrier.lower()) + if color: + self._bus_colors[label] = color + return self._bus_colors + def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]: """ Get network topology information as dictionaries. 
@@ -168,15 +231,20 @@ def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]: for node in chain(self._fs.components.values(), self._fs.buses.values()) } - edges = { - flow.label_full: { + # Use cached colors for efficient lookup + flow_carriers = self._fs.flow_carriers + carrier_colors = self.carrier_colors + + edges = {} + for flow in self._fs.flows.values(): + carrier_name = flow_carriers.get(flow.label_full) + edges[flow.label_full] = { 'label': flow.label, 'start': flow.bus if flow.is_input_in_component else flow.component, 'end': flow.component if flow.is_input_in_component else flow.bus, 'infos': flow.__str__(), + 'carrier_color': carrier_colors.get(carrier_name) if carrier_name else None, } - for flow in self._fs.flows.values() - } return nodes, edges @@ -235,6 +303,7 @@ def plot( 'value': [], 'label': [], 'customdata': [], # For hover text + 'color': [], # Carrier-based colors } # Collect node hover info (format repr for HTML display) @@ -244,6 +313,10 @@ def plot( for bus in self._fs.buses.values(): node_hover[bus.label] = repr(bus).replace('\n', '
') + # Use cached colors for efficient lookup + flow_carriers = self._fs.flow_carriers + carrier_colors = self.carrier_colors + for flow in self._fs.flows.values(): bus_label = flow.bus comp_label = flow.component @@ -263,21 +336,35 @@ def plot( links['label'].append(flow.label_full) links['customdata'].append(repr(flow).replace('\n', '
')) # Flow repr for hover + # Get carrier color for this flow (subtle/semi-transparent) using cached colors + carrier_name = flow_carriers.get(flow.label_full) + color = carrier_colors.get(carrier_name) if carrier_name else None + links['color'].append(hex_to_rgba(color, alpha=0.4) if color else hex_to_rgba('', alpha=0.4)) + # Create figure node_list = list(nodes) node_indices = {n: i for i, n in enumerate(node_list)} - # Get colors for buses only, then apply to all nodes - bus_labels = [bus.label for bus in self._fs.buses.values()] - bus_color_map = process_colors(colors, bus_labels) + # Get colors for buses and components using cached colors + bus_colors_cached = self.bus_colors + component_colors_cached = self.component_colors + + # If user provided colors, process them for buses + if colors is not None: + bus_labels = [bus.label for bus in self._fs.buses.values()] + bus_color_map = process_colors(colors, bus_labels) + else: + bus_color_map = bus_colors_cached - # Assign colors to nodes: buses get their color, components get a neutral gray + # Assign colors to nodes: buses get their color, components get their color or neutral gray node_colors = [] for node in node_list: if node in bus_color_map: node_colors.append(bus_color_map[node]) + elif node in component_colors_cached: + node_colors.append(component_colors_cached[node]) else: - # Component - use a neutral gray + # Fallback - use a neutral gray node_colors.append('#808080') # Build hover text for nodes @@ -302,6 +389,7 @@ def plot( label=links['label'], customdata=links['customdata'], hovertemplate='%{customdata}', + color=links['color'], # Carrier-based colors ), ) ] From 446f2aac619b444dc9ba6651ca86a246239abb90 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 12 Dec 2025 12:43:05 +0100 Subject: [PATCH 36/49] Improve validation of size being present Improve example notebooks Fix resampling Re-Connect FLow_system when loading from dataset --- 
docs/notebooks/01-quickstart.ipynb | 16 +- docs/notebooks/02-heat-system.ipynb | 29 +- .../03-investment-optimization.ipynb | 65 +- .../04-operational-constraints.ipynb | 17 +- docs/notebooks/05-multi-carrier-system.ipynb | 15 +- docs/notebooks/06-piecewise-efficiency.ipynb | 15 +- docs/notebooks/07-scenarios-and-periods.ipynb | 15 +- .../08-large-scale-optimization.ipynb | 15 +- .../09-plotting-and-data-access.ipynb | 8884 +++++++++++++++++ .../data/generate_example_systems.py | 333 + docs/notebooks/index.md | 45 +- flixopt/components.py | 103 +- flixopt/elements.py | 73 +- flixopt/flow_system.py | 7 + flixopt/interface.py | 10 +- flixopt/io.py | 2 +- flixopt/structure.py | 9 +- flixopt/transform_accessor.py | 41 + mkdocs.yml | 24 +- tests/conftest.py | 18 +- tests/deprecated/conftest.py | 18 +- tests/deprecated/test_component.py | 22 +- tests/deprecated/test_effect.py | 4 +- tests/deprecated/test_flow.py | 1 + tests/deprecated/test_flow_system_resample.py | 26 +- tests/deprecated/test_functional.py | 18 +- tests/deprecated/test_scenarios.py | 4 +- tests/deprecated/test_storage.py | 1 + tests/test_component.py | 22 +- tests/test_effect.py | 4 +- tests/test_flow.py | 1 + tests/test_flow_system_resample.py | 26 +- tests/test_functional.py | 18 +- tests/test_scenarios.py | 4 +- tests/test_storage.py | 1 + 35 files changed, 9622 insertions(+), 284 deletions(-) create mode 100644 docs/notebooks/09-plotting-and-data-access.ipynb create mode 100644 docs/notebooks/data/generate_example_systems.py diff --git a/docs/notebooks/01-quickstart.ipynb b/docs/notebooks/01-quickstart.ipynb index be0da5779..ba4becd0c 100644 --- a/docs/notebooks/01-quickstart.ipynb +++ b/docs/notebooks/01-quickstart.ipynb @@ -4,21 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Quickstart: Heating a Small Workshop\n", - "\n", - "## User Story\n", - "\n", - "> *You manage a small workshop that needs heating. 
You have a gas boiler and want to find the optimal operation schedule to minimize heating costs over the next few hours.*\n", - "\n", - "This notebook introduces the **core concepts** of flixopt:\n", - "\n", - "- **FlowSystem**: The container for your energy system model\n", - "- **Bus**: Balance nodes where energy flows meet\n", - "- **Effect**: Quantities to track and optimize (costs, emissions)\n", - "- **Components**: Equipment like boilers, sources, and sinks\n", - "- **Flow**: Connections between components and buses" - ] + "source": "# Quickstart\n\nHeat a small workshop with a gas boiler - the minimal working example.\n\nThis notebook introduces the **core concepts** of flixopt:\n\n- **FlowSystem**: The container for your energy system model\n- **Bus**: Balance nodes where energy flows meet\n- **Effect**: Quantities to track and optimize (costs, emissions)\n- **Components**: Equipment like boilers, sources, and sinks\n- **Flow**: Connections between components and buses" }, { "cell_type": "markdown", diff --git a/docs/notebooks/02-heat-system.ipynb b/docs/notebooks/02-heat-system.ipynb index fd868ea9a..5028065fd 100644 --- a/docs/notebooks/02-heat-system.ipynb +++ b/docs/notebooks/02-heat-system.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# District Heating System with Thermal Storage\n", - "\n", - "## User Story\n", - "\n", - "> *You operate a small district heating network serving an office building. The system has a gas boiler and a thermal storage tank. 
Electricity prices vary throughout the day, and you want to optimize when to charge/discharge the storage to minimize costs.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **Storage**: Thermal buffer tanks with charging/discharging\n", - "- **Time series data**: Using real demand profiles\n", - "- **Multiple components**: Combining boiler, storage, and loads\n", - "- **Result visualization**: Heatmaps, balance plots, and charge states" - ] + "source": "# Heat System\n\nDistrict heating with thermal storage and time-varying prices.\n\nThis notebook introduces:\n\n- **Storage**: Thermal buffer tanks with charging/discharging\n- **Time series data**: Using real demand profiles\n- **Multiple components**: Combining boiler, storage, and loads\n- **Result visualization**: Heatmaps, balance plots, and charge states" }, { "cell_type": "markdown", @@ -125,14 +112,14 @@ "metadata": {}, "outputs": [], "source": [ - "# Time-of-use gas prices (\u20ac/kWh)\n", + "# Time-of-use gas prices (€/kWh)\n", "gas_price = np.where(\n", " (hour_of_day >= 6) & (hour_of_day <= 22),\n", " 0.08, # Peak: 6am-10pm\n", " 0.05, # Off-peak: 10pm-6am\n", ")\n", "\n", - "fig = px.line(x=timesteps, y=gas_price, title='Gas Price [\u20ac/kWh]', labels={'x': 'Time', 'y': '\u20ac/kWh'})\n", + "fig = px.line(x=timesteps, y=gas_price, title='Gas Price [€/kWh]', labels={'x': 'Time', 'y': '€/kWh'})\n", "fig" ] }, @@ -149,9 +136,9 @@ "- Office building heat demand\n", "\n", "```\n", - "Gas Grid \u2500\u2500\u25ba [Gas] \u2500\u2500\u25ba Boiler \u2500\u2500\u25ba [Heat] \u25c4\u2500\u2500\u25ba Storage\n", - " \u2502\n", - " \u25bc\n", + "Gas Grid ──► [Gas] ──► Boiler ──► [Heat] ◄──► Storage\n", + " │\n", + " ▼\n", " Office\n", "```" ] @@ -170,7 +157,7 @@ " fx.Bus('Gas', carrier='gas'),\n", " fx.Bus('Heat', carrier='heat'),\n", " # === Effect ===\n", - " fx.Effect('costs', '\u20ac', 'Operating Costs', is_standard=True, is_objective=True),\n", + " fx.Effect('costs', '€', 'Operating Costs', 
is_standard=True, is_objective=True),\n", " # === Gas Supply with time-varying price ===\n", " fx.Source(\n", " 'GasGrid',\n", @@ -311,7 +298,7 @@ "total_costs = flow_system.solution['costs'].item()\n", "total_heat = heat_demand.sum()\n", "\n", - "print(f'Total operating costs: {total_costs:.2f} \u20ac')\n", + "print(f'Total operating costs: {total_costs:.2f} €')\n", "print(f'Total heat delivered: {total_heat:.0f} kWh')\n", "print(f'Average cost: {total_costs / total_heat * 100:.2f} ct/kWh')" ] diff --git a/docs/notebooks/03-investment-optimization.ipynb b/docs/notebooks/03-investment-optimization.ipynb index c3ffe2cdd..478f93798 100644 --- a/docs/notebooks/03-investment-optimization.ipynb +++ b/docs/notebooks/03-investment-optimization.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Investment Optimization: Sizing a Solar Heating System\n", - "\n", - "## User Story\n", - "\n", - "> *You're designing a solar thermal system for a swimming pool. You need to decide: How large should the solar collectors be? How big should the buffer tank be? The goal is to minimize total costs (investment + operation) over the planning horizon.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **InvestParameters**: Define investment decisions with size bounds and costs\n", - "- **Investment costs**: Fixed costs and size-dependent costs\n", - "- **Optimal sizing**: Let the optimizer find the best equipment sizes\n", - "- **Trade-off analysis**: Balance investment vs. operating costs" - ] + "source": "# Sizing\n\nSize a solar heating system - let the optimizer decide equipment sizes.\n\nThis notebook introduces:\n\n- **InvestParameters**: Define investment decisions with size bounds and costs\n- **Investment costs**: Fixed costs and size-dependent costs\n- **Optimal sizing**: Let the optimizer find the best equipment sizes\n- **Trade-off analysis**: Balance investment vs. 
operating costs" }, { "cell_type": "markdown", @@ -59,13 +46,13 @@ "- **Pool**: Constant heat demand of 150 kW during operating hours\n", "\n", "```\n", - " \u2600\ufe0f Solar \u2500\u2500\u25ba [Heat] \u25c4\u2500\u2500 Boiler \u25c4\u2500\u2500 [Gas]\n", - " \u2502\n", - " \u25bc\n", + " ☀️ Solar ──► [Heat] ◄── Boiler ◄── [Gas]\n", + " │\n", + " ▼\n", " Buffer Tank\n", - " \u2502\n", - " \u25bc\n", - " Pool \ud83c\udfca\n", + " │\n", + " ▼\n", + " Pool 🏊\n", "```" ] }, @@ -91,7 +78,7 @@ "hours = np.arange(168)\n", "hour_of_day = hours % 24\n", "\n", - "# Solar radiation profile (kW/m\u00b2 equivalent, simplified)\n", + "# Solar radiation profile (kW/m² equivalent, simplified)\n", "# Peak around noon, zero at night\n", "solar_profile = np.maximum(0, np.sin((hour_of_day - 6) * np.pi / 12)) * 0.8\n", "solar_profile = np.where((hour_of_day >= 6) & (hour_of_day <= 20), solar_profile, 0)\n", @@ -138,7 +125,7 @@ "source": [ "## Define Costs\n", "\n", - "Investment costs are **annualized** (\u20ac/year) to compare with operating costs:" + "Investment costs are **annualized** (€/year) to compare with operating costs:" ] }, { @@ -149,14 +136,14 @@ "outputs": [], "source": [ "# Cost parameters\n", - "GAS_PRICE = 0.12 # \u20ac/kWh - high gas price makes solar attractive\n", + "GAS_PRICE = 0.12 # €/kWh - high gas price makes solar attractive\n", "\n", - "# Solar collectors: 400 \u20ac/kW installed, 20-year lifetime \u2192 ~25 \u20ac/kW/year annualized\n", + "# Solar collectors: 400 €/kW installed, 20-year lifetime → ~25 €/kW/year annualized\n", "# (simplified, real calculation would include interest rate)\n", - "SOLAR_COST_PER_KW = 20 # \u20ac/kW/year\n", + "SOLAR_COST_PER_KW = 20 # €/kW/year\n", "\n", - "# Buffer tank: 50 \u20ac/kWh capacity, 30-year lifetime \u2192 ~2 \u20ac/kWh/year\n", - "TANK_COST_PER_KWH = 1.5 # \u20ac/kWh/year\n", + "# Buffer tank: 50 €/kWh capacity, 30-year lifetime → ~2 €/kWh/year\n", + "TANK_COST_PER_KWH = 1.5 # €/kWh/year\n", "\n", "# Scale factor: 
We model 1 week, but costs are annual\n", "# So we scale investment costs to weekly equivalent\n", @@ -164,8 +151,8 @@ "SOLAR_COST_WEEKLY = SOLAR_COST_PER_KW / WEEKS_PER_YEAR\n", "TANK_COST_WEEKLY = TANK_COST_PER_KWH / WEEKS_PER_YEAR\n", "\n", - "print(f'Solar cost: {SOLAR_COST_WEEKLY:.3f} \u20ac/kW/week')\n", - "print(f'Tank cost: {TANK_COST_WEEKLY:.4f} \u20ac/kWh/week')" + "print(f'Solar cost: {SOLAR_COST_WEEKLY:.3f} €/kW/week')\n", + "print(f'Tank cost: {TANK_COST_WEEKLY:.4f} €/kWh/week')" ] }, { @@ -192,7 +179,7 @@ " fx.Bus('Heat', carrier='heat'),\n", " fx.Bus('Gas', carrier='gas'),\n", " # === Effects ===\n", - " fx.Effect('costs', '\u20ac', 'Total Costs', is_standard=True, is_objective=True),\n", + " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n", " # === Gas Supply ===\n", " fx.Source(\n", " 'GasGrid',\n", @@ -324,11 +311,11 @@ "gas_costs = total_costs - solar_invest - tank_invest\n", "\n", "print('=== Weekly Cost Breakdown ===')\n", - "print(f'Solar investment: {solar_invest:.2f} \u20ac ({solar_invest / total_costs * 100:.1f}%)')\n", - "print(f'Tank investment: {tank_invest:.2f} \u20ac ({tank_invest / total_costs * 100:.1f}%)')\n", - "print(f'Gas operating: {gas_costs:.2f} \u20ac ({gas_costs / total_costs * 100:.1f}%)')\n", - "print('\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500')\n", - "print(f'Total: {total_costs:.2f} \u20ac')" + "print(f'Solar investment: {solar_invest:.2f} € ({solar_invest / total_costs * 100:.1f}%)')\n", + "print(f'Tank investment: {tank_invest:.2f} € ({tank_invest / total_costs * 100:.1f}%)')\n", + "print(f'Gas operating: {gas_costs:.2f} € ({gas_costs / total_costs * 100:.1f}%)')\n", + "print('─────────────────────────────')\n", + "print(f'Total: {total_costs:.2f} €')" ] }, { @@ -394,10 +381,10 @@ "savings_pct = savings / gas_only_cost * 100\n", "\n", "print('=== Comparison 
with Gas-Only ===')\n", - "print(f'Gas-only cost: {gas_only_cost:.2f} \u20ac/week')\n", - "print(f'With solar: {total_costs:.2f} \u20ac/week')\n", - "print(f'Savings: {savings:.2f} \u20ac/week ({savings_pct:.1f}%)')\n", - "print(f'Annual savings: {savings * 52:.0f} \u20ac/year')" + "print(f'Gas-only cost: {gas_only_cost:.2f} €/week')\n", + "print(f'With solar: {total_costs:.2f} €/week')\n", + "print(f'Savings: {savings:.2f} €/week ({savings_pct:.1f}%)')\n", + "print(f'Annual savings: {savings * 52:.0f} €/year')" ] }, { diff --git a/docs/notebooks/04-operational-constraints.ipynb b/docs/notebooks/04-operational-constraints.ipynb index 2fbd78cb9..d204b8809 100644 --- a/docs/notebooks/04-operational-constraints.ipynb +++ b/docs/notebooks/04-operational-constraints.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Operational Constraints: Industrial Boiler with Startup Costs\n", - "\n", - "## User Story\n", - "\n", - "> *You operate an industrial steam boiler for a factory. The boiler has significant startup costs (fuel for warmup, operator time) and can't be cycled on/off frequently due to thermal stress. 
You need to find an operating schedule that minimizes costs while respecting these operational constraints.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **StatusParameters**: Model on/off decisions with constraints\n", - "- **Startup costs**: Penalties for turning equipment on\n", - "- **Minimum uptime/downtime**: Prevent rapid cycling\n", - "- **Minimum load**: Equipment can't run below a certain output" - ] + "source": "# Constraints\n\nIndustrial boiler with startup costs, minimum uptime, and load constraints.\n\nThis notebook introduces:\n\n- **StatusParameters**: Model on/off decisions with constraints\n- **Startup costs**: Penalties for turning equipment on\n- **Minimum uptime/downtime**: Prevent rapid cycling\n- **Minimum load**: Equipment can't run below a certain output" }, { "cell_type": "markdown", @@ -4080,7 +4067,7 @@ "start_time": "2025-12-12T10:09:38.098962Z" } }, - "source": "flow_system = fx.FlowSystem(timesteps)\n\n# Define and register a custom carrier for process steam\nsteam_carrier = fx.Carrier('steam', color='#87CEEB', unit='kW_th', description='Process steam')\nflow_system.add_carriers(steam_carrier)\n\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Steam', carrier='steam'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n ),\n # === Main Industrial Boiler (with operational constraints) ===\n fx.linear_converters.Boiler(\n 'MainBoiler',\n thermal_efficiency=0.94, # High efficiency\n # StatusParameters define on/off behavior\n status_parameters=fx.StatusParameters(\n effects_per_startup={'costs': 50}, # 50€ startup cost\n min_uptime=4, # Must run at least 4 hours once started\n min_downtime=2, # Must stay off at least 2 hours\n ),\n thermal_flow=fx.Flow(\n 'Steam',\n bus='Steam',\n size=500,\n 
relative_minimum=0.3, # Minimum load: 30% = 150 kW\n ),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Backup Boiler (flexible, but less efficient) ===\n fx.linear_converters.Boiler(\n 'BackupBoiler',\n thermal_efficiency=0.85, # Lower efficiency\n # No status parameters = can turn on/off freely\n thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Factory Steam Demand ===\n fx.Sink(\n 'Factory',\n inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n ),\n)", + "source": "flow_system = fx.FlowSystem(timesteps)\n\n# Define and register a custom carrier for process steam\nsteam_carrier = fx.Carrier('steam', color='#87CEEB', unit='kW_th', description='Process steam')\nflow_system.add_carriers(steam_carrier)\n\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Steam', carrier='steam'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n ),\n # === Main Industrial Boiler (with operational constraints) ===\n fx.linear_converters.Boiler(\n 'MainBoiler',\n thermal_efficiency=0.94, # High efficiency\n # StatusParameters define on/off behavior\n status_parameters=fx.StatusParameters(\n effects_per_startup={'costs': 50}, # 50€ startup cost\n min_uptime=4, # Must run at least 4 hours once started\n min_downtime=2, # Must stay off at least 2 hours\n ),\n thermal_flow=fx.Flow(\n 'Steam',\n bus='Steam',\n size=500,\n relative_minimum=0.3, # Minimum load: 30% = 150 kW\n ),\n fuel_flow=fx.Flow('Gas', bus='Gas', size=550), # size required for big-M constraints\n ),\n # === Backup Boiler (flexible, but less efficient) ===\n fx.linear_converters.Boiler(\n 'BackupBoiler',\n thermal_efficiency=0.85, # Lower efficiency\n # No status parameters = can turn on/off freely\n 
thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Factory Steam Demand ===\n fx.Sink(\n 'Factory',\n inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n ),\n)", "outputs": [], "execution_count": null }, diff --git a/docs/notebooks/05-multi-carrier-system.ipynb b/docs/notebooks/05-multi-carrier-system.ipynb index 9e1a77bcc..de2ce1b5f 100644 --- a/docs/notebooks/05-multi-carrier-system.ipynb +++ b/docs/notebooks/05-multi-carrier-system.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Multi-Carrier System: Hospital with CHP\n", - "\n", - "## User Story\n", - "\n", - "> *You're the energy manager of a hospital. The facility needs both electricity and heat around the clock. You have a Combined Heat and Power (CHP) unit that produces both simultaneously, plus a gas boiler for backup heat and a grid connection for electricity. Your goal is to minimize energy costs while ensuring reliable supply.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **Multiple energy carriers**: Electricity, heat, and gas in one system\n", - "- **CHP (Cogeneration)**: Equipment producing multiple outputs\n", - "- **Electricity market**: Buying and selling to the grid\n", - "- **Carrier colors**: Visual distinction between energy types" - ] + "source": "# Multi-Carrier\n\nHospital with CHP producing both electricity and heat.\n\nThis notebook introduces:\n\n- **Multiple energy carriers**: Electricity, heat, and gas in one system\n- **CHP (Cogeneration)**: Equipment producing multiple outputs\n- **Electricity market**: Buying and selling to the grid\n- **Carrier colors**: Visual distinction between energy types" }, { "cell_type": "markdown", diff --git a/docs/notebooks/06-piecewise-efficiency.ipynb b/docs/notebooks/06-piecewise-efficiency.ipynb index af3cb5375..bb50793ff 100644 --- a/docs/notebooks/06-piecewise-efficiency.ipynb +++ 
b/docs/notebooks/06-piecewise-efficiency.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Piecewise Efficiency: Heat Pump with Variable COP\n", - "\n", - "## User Story\n", - "\n", - "> *You're installing a heat pump for a commercial building. The heat pump's efficiency (COP - Coefficient of Performance) varies with the outdoor temperature: it's more efficient in mild weather than in cold weather. You want to model this realistically to accurately predict operating costs.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **Piecewise linear functions**: Approximate non-linear behavior\n", - "- **Variable efficiency**: COP changes with operating conditions\n", - "- **LinearConverter with segments**: Multiple operating points\n", - "- **Piecewise effects**: Non-linear cost curves" - ] + "source": "# Piecewise\n\nHeat pump with temperature-dependent COP and part-load curves.\n\nThis notebook introduces:\n\n- **Piecewise linear functions**: Approximate non-linear behavior\n- **Variable efficiency**: COP changes with operating conditions\n- **LinearConverter with segments**: Multiple operating points\n- **Piecewise effects**: Non-linear cost curves" }, { "cell_type": "markdown", diff --git a/docs/notebooks/07-scenarios-and-periods.ipynb b/docs/notebooks/07-scenarios-and-periods.ipynb index 37fc52732..c8294ba7e 100644 --- a/docs/notebooks/07-scenarios-and-periods.ipynb +++ b/docs/notebooks/07-scenarios-and-periods.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Scenarios and Periods: Investment Planning Under Uncertainty\n", - "\n", - "## User Story\n", - "\n", - "> *You're planning a district heating system for a new residential development. The project spans 3 years, and you face uncertainty: Will the winter be mild or harsh? Will gas prices stay stable or spike? 
You need to make investment decisions today that work well across multiple possible futures.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **Periods**: Multiple planning years with different conditions\n", - "- **Scenarios**: Uncertain futures (mild vs. harsh winter)\n", - "- **Scenario weights**: Probability-weighted optimization\n", - "- **Multi-dimensional data**: Parameters that vary by time, period, and scenario" - ] + "source": "# Scenarios\n\nMulti-year planning with uncertain demand scenarios.\n\nThis notebook introduces:\n\n- **Periods**: Multiple planning years with different conditions\n- **Scenarios**: Uncertain futures (mild vs. harsh winter)\n- **Scenario weights**: Probability-weighted optimization\n- **Multi-dimensional data**: Parameters that vary by time, period, and scenario" }, { "cell_type": "markdown", diff --git a/docs/notebooks/08-large-scale-optimization.ipynb b/docs/notebooks/08-large-scale-optimization.ipynb index 4d745f4b4..3dcbb4bb9 100644 --- a/docs/notebooks/08-large-scale-optimization.ipynb +++ b/docs/notebooks/08-large-scale-optimization.ipynb @@ -4,20 +4,7 @@ "cell_type": "markdown", "id": "0", "metadata": {}, - "source": [ - "# Large-Scale Optimization: Computational Efficiency Techniques\n", - "\n", - "## User Story\n", - "\n", - "> *You're planning a district energy system with a full year of hourly data (8,760 timesteps). The optimization takes hours to complete. You need to find ways to get good solutions faster for iterative design exploration.*\n", - "\n", - "This notebook introduces:\n", - "\n", - "- **Resampling**: Reduce time resolution (e.g., hourly → 4-hourly)\n", - "- **Clustering**: Identify typical periods (e.g., 8 representative days)\n", - "- **Two-stage optimization**: Size with reduced data, dispatch at full resolution\n", - "- **Speed vs. 
accuracy trade-offs**: When to use each technique" - ] + "source": "# Large-Scale\n\nSpeed up large problems with resampling and two-stage optimization.\n\nThis notebook introduces:\n\n- **Resampling**: Reduce time resolution (e.g., hourly → 4-hourly)\n- **Clustering**: Identify typical periods (e.g., 8 representative days)\n- **Two-stage optimization**: Size with reduced data, dispatch at full resolution\n- **Speed vs. accuracy trade-offs**: When to use each technique" }, { "cell_type": "markdown", diff --git a/docs/notebooks/09-plotting-and-data-access.ipynb b/docs/notebooks/09-plotting-and-data-access.ipynb new file mode 100644 index 000000000..cadcc240a --- /dev/null +++ b/docs/notebooks/09-plotting-and-data-access.ipynb @@ -0,0 +1,8884 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": "# Plotting\n\nAccess optimization results and create visualizations.\n\nThis notebook covers:\n\n- Loading saved FlowSystems from NetCDF files\n- Accessing data (flow rates, sizes, effects, charge states)\n- Time series plots (balance, flows, storage)\n- Aggregated plots (sizes, effects, duration curves)\n- Heatmaps with time reshaping\n- Sankey diagrams\n- Topology visualization\n- Color customization and export" + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "2", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:29.505282Z", + "start_time": "2025-12-12T12:06:26.542476Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "flixopt.config.CONFIG" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from pathlib import Path\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Generate Example Data\n", + "\n", + "First, run the script 
that generates three example FlowSystems with solutions:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.136859Z", + "start_time": "2025-12-12T12:06:29.554928Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating simple_system...\r\n", + " Optimizing...\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/simple_system.nc4...\r\n", + " Done. Objective: 558.83\r\n", + "\r\n", + "Creating complex_system...\r\n", + " Optimizing...\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/complex_system.nc4...\r\n", + " Done. Objective: 220.25\r\n", + "\r\n", + "Creating multiperiod_system...\r\n", + " Optimizing...\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/multiperiod_system.nc4...\r\n", + " Done. Objective: 644.93\r\n", + "\r\n", + "All systems generated successfully!\r\n" + ] + } + ], + "source": [ + "# Run the generation script (only needed once, or to regenerate)\n", + "!python data/generate_example_systems.py" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "## 1. 
Loading Saved FlowSystems\n", + "\n", + "FlowSystems can be saved to and loaded from NetCDF files, preserving the full structure and solution:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "6", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.466083Z", + "start_time": "2025-12-12T12:06:35.210813Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded systems:\n", + " simple: 4 components, 2 buses\n", + " complex_sys: 9 components, 3 buses\n", + " multiperiod: 4 components, dims={'scenario': 2, 'period': 3, 'time': 49}\n" + ] + } + ], + "source": [ + "DATA_DIR = Path('data')\n", + "\n", + "# Load the three example systems\n", + "simple = fx.FlowSystem.from_netcdf(DATA_DIR / 'simple_system.nc4')\n", + "complex_sys = fx.FlowSystem.from_netcdf(DATA_DIR / 'complex_system.nc4')\n", + "multiperiod = fx.FlowSystem.from_netcdf(DATA_DIR / 'multiperiod_system.nc4')\n", + "\n", + "print('Loaded systems:')\n", + "print(f' simple: {len(simple.components)} components, {len(simple.buses)} buses')\n", + "print(f' complex_sys: {len(complex_sys.components)} components, {len(complex_sys.buses)} buses')\n", + "print(f' multiperiod: {len(multiperiod.components)} components, dims={dict(multiperiod.solution.sizes)}')" + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": "## 2. 
Quick Overview: Balance Plot\n\nLet's start with the most common visualization - a balance plot showing energy flows:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "# Balance plot for the Heat bus - shows all inflows and outflows\n", + "simple.statistics.plot.balance('Heat')" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.534937Z", + "start_time": "2025-12-12T12:06:35.496736Z" + } + }, + "source": "### Accessing Plot Data\n\nEvery plot returns a `PlotResult` with both the figure and underlying data. Use `.data.to_dataframe()` to get a pandas DataFrame:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "# Get plot result and access the underlying data\n", + "result = simple.statistics.plot.balance('Heat', show=False)\n", + "\n", + "# Convert to DataFrame for easy viewing/export\n", + "df = result.data.to_dataframe()\n", + "df.head(10)" + ] + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.617665Z", + "start_time": "2025-12-12T12:06:35.585811Z" + } + }, + "source": "### Energy Totals\n\nGet total energy by flow using `flow_hours`:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": "import pandas as pd\n\n# Total energy per flow\ntotals = {var: float(simple.statistics.flow_hours[var].sum()) for var in simple.statistics.flow_hours.data_vars}\n\npd.Series(totals, name='Energy [kWh]').to_frame().T" + }, + { + "cell_type": "markdown", + "id": "13", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.754890Z", + "start_time": "2025-12-12T12:06:35.735084Z" + } + }, + "source": "## 3. 
Time Series Plots" + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": "### 3.1 Balance Plot\n\nShows inflows (positive) and outflows (negative) for a bus or component:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.874652Z", + "start_time": "2025-12-12T12:06:35.844281Z" + } + }, + "outputs": [], + "source": [ + "# Component balance (all flows of a component)\n", + "simple.statistics.plot.balance('ThermalStorage')" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": "### 3.2 Carrier Balance\n\nShows all flows of a specific carrier across the entire system:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:36.112518Z", + "start_time": "2025-12-12T12:06:36.004885Z" + } + }, + "outputs": [], + "source": [ + "complex_sys.statistics.plot.carrier_balance('heat')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "complex_sys.statistics.plot.carrier_balance('electricity')" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:36.266666Z", + "start_time": "2025-12-12T12:06:36.198686Z" + } + }, + "source": "### 3.3 Flow Rates\n\nPlot multiple flow rates together:" + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "# All flows\n", + "simple.statistics.plot.flows()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:36.455687Z", + "start_time": "2025-12-12T12:06:36.450204Z" + } + }, + "outputs": [], + "source": [ + "# Flows filtered by component\n", + "simple.statistics.plot.flows(component='Boiler')" + ] + }, + { + 
"cell_type": "markdown", + "id": "32", + "metadata": {}, + "source": [ + "### 3.4 Storage Plot\n", + "\n", + "Combined view of storage charge state and flows:" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "33", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:41.765686Z", + "start_time": "2025-12-12T12:06:41.441569Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 5kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", + "Data variables:\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", + " charge_state (time) float64 1kB 250.0 248.8 ... 102.5 200.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'time=%{x}
value=%{y}',\n", + " 'legendgroup': '',\n", + " 'line': {'color': 'black', 'width': 2},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'charge_state',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'ThermalStorage Operation (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}},\n", + " 'yaxis2': {'overlaying': 'y', 'showgrid': False, 'side': 'right', 'title': {'text': 'Charge State'}}}\n", + "}))" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simple.statistics.plot.storage('ThermalStorage')" + ] + }, + { + "cell_type": "markdown", + "id": "34", + "metadata": {}, + "source": [ + "### 3.5 Charge States Plot\n", + "\n", + "Plot charge state time series directly:" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "35", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:42.032694Z", + "start_time": "2025-12-12T12:06:41.807633Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n", + "Data variables:\n", + " ThermalStorage (time) float64 1kB 250.0 248.8 247.5 ... 103.0 102.5 200.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ThermalStorage
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Storage Charge States'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'Charge State'}}}\n", + "}))" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simple.statistics.plot.charge_states('ThermalStorage')" + ] + }, + { + "cell_type": "markdown", + "id": "36", + "metadata": {}, + "source": [ + "## 4. Aggregated Plots" + ] + }, + { + "cell_type": "markdown", + "id": "37", + "metadata": {}, + "source": [ + "### 4.1 Sizes Plot\n", + "\n", + "Bar chart of component/flow sizes:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "38", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:42.195142Z", + "start_time": "2025-12-12T12:06:42.126462Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 0B\n", + "Dimensions: ()\n", + "Data variables:\n", + " *empty*, figure=Figure({\n", + " 'data': [], 'layout': {'template': '...'}\n", + "}))" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simple.statistics.plot.sizes()" + ] + }, + { + "cell_type": "markdown", + "id": "39", + "metadata": {}, + "source": [ + "### 4.2 Effects Plot\n", + "\n", + "Bar chart of effect totals by component:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "40", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:42.497806Z", + "start_time": "2025-12-12T12:06:42.283099Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 24B\n", + "Dimensions: (effect: 1, component: 1)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'costs'\n", + " * component (component) object 8B 'GasGrid'\n", + "Data variables:\n", + " total (effect, component) float64 8B 558.8, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#a4fc3b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'sDkY5qR2gUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'costs (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [GasGrid],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simple.statistics.plot.effects(effect='costs')" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "41", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:43.064579Z", + "start_time": "2025-12-12T12:06:42.560263Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 72B\n", + "Dimensions: (effect: 1, component: 4)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'costs'\n", + " * component (component) object 32B 'CHP' 'ElectricityExport' ... 'GasGrid'\n", + "Data variables:\n", + " total (effect, component) float64 32B 78.0 -386.3 118.1 410.4, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP',\n", + " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['CHP'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAACAU0A=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityExport',\n", + " 'marker': {'color': '#21e2b5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityExport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityExport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': '3ObHwIskeMA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityImport',\n", + " 'marker': {'color': '#f7b836', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityImport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityImport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'A0vkOKSHXUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AUZx5ZymeUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'costs (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [CHP, ElectricityExport,\n", + " ElectricityImport, GasGrid],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Multi-effect system: compare costs and CO2\n", + "complex_sys.statistics.plot.effects(effect='costs')" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "42", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:43.867944Z", + "start_time": "2025-12-12T12:06:43.136118Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 72B\n", + "Dimensions: (effect: 1, component: 4)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'CO2'\n", + " * component (component) object 32B 'CHP' 'ElectricityExport' ... 'GasGrid'\n", + "Data variables:\n", + " total (effect, component) float64 32B 0.0 0.0 295.3 1.368e+03, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP',\n", + " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['CHP'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityExport',\n", + " 'marker': {'color': '#21e2b5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityExport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityExport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityImport',\n", + " 'marker': {'color': '#f7b836', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityImport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityImport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': '4q6Oo8Z0ckA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AmXeaS1glUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'CO2 (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [CHP, ElectricityExport,\n", + " ElectricityImport, GasGrid],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "complex_sys.statistics.plot.effects(effect='CO2')" + ] + }, + { + "cell_type": "markdown", + "id": "43", + "metadata": {}, + "source": [ + "### 4.3 Duration Curve\n", + "\n", + "Shows how often each power level is reached:" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "44", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:44.614581Z", + "start_time": "2025-12-12T12:06:44.248704Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (duration: 169)\n", + "Coordinates:\n", + " * duration (duration) int64 1kB 0 1 2 3 4 5 6 ... 163 164 165 166 167 168\n", + "Data variables:\n", + " Boiler(Heat) (duration) float64 1kB nan 137.8 134.1 133.1 ... 0.0 0.0 0.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAABAAIAAwAEAAUABgAHAAgACQAKAA' ... '4AnwCgAKEAogCjAKQApQCmAKcAqAA='),\n", + " 'dtype': 'i2'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////39oQtzNVzphQLt+ZyCBw2' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Duration Curve'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simple.statistics.plot.duration_curve('Boiler(Heat)')" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "45", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:46.830321Z", + "start_time": "2025-12-12T12:06:46.454534Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (duration: 73)\n", + "Coordinates:\n", + " * duration (duration) int64 584B 0 1 2 3 4 5 ... 67 68 69 70 71 72\n", + "Data variables:\n", + " CHP(Heat) (duration) float64 584B nan 85.0 85.0 ... 0.0 0.0 0.0\n", + " HeatPump(Heat) (duration) float64 584B nan 40.0 40.0 ... 0.0 0.0 0.0\n", + " BackupBoiler(Heat) (duration) float64 584B nan 0.0 0.0 0.0 ... 0.0 0.0 0.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=CHP(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP(Heat)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'CHP(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////38AAAAAAEBVQAAAAAAAQF' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatPump(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump(Heat)',\n", + " 'line': {'color': '#EF553B', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'HeatPump(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////38AAAAAAABEQAAAAAAAAE' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=BackupBoiler(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'BackupBoiler(Heat)',\n", + " 'line': {'color': '#00CC96', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'BackupBoiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////38AAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Duration Curve'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Multiple variables\n", + "complex_sys.statistics.plot.duration_curve(['CHP(Heat)', 'HeatPump(Heat)', 'BackupBoiler(Heat)'])" + ] + }, + { + "cell_type": "markdown", + "id": "46", + "metadata": {}, + "source": [ + "## 5. Heatmaps\n", + "\n", + "Heatmaps reshape time series into 2D grids (e.g., hour-of-day vs day):" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "47", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:47.605052Z", + "start_time": "2025-12-12T12:06:47.328779Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 32.48 42.84 47.28 ... 124.5 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Boiler(Heat)|flow_rate: %{z}',\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('5ZuWpeU9QED8nmEA1mtFQOR8bxYopE' ... '//////M0D1ufhH+R5fQAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Boiler(Heat)|flow_rate'}},\n", + " 'colorscale': [[0.0, '#30123b'],\n", + " [0.07142857142857142, '#4145ab'],\n", + " [0.14285714285714285, '#4675ed'],\n", + " [0.21428571428571427, '#39a2fc'],\n", + " [0.2857142857142857, '#1bcfd4'],\n", + " [0.35714285714285715, '#24eca6'],\n", + " [0.42857142857142855, '#61fc6c'], [0.5,\n", + " '#a4fc3b'], [0.5714285714285714,\n", + " '#d1e834'], [0.6428571428571429,\n", + " '#f3c63a'], [0.7142857142857143,\n", + " '#fe9b2d'], [0.7857142857142857,\n", + " '#f36315'], [0.8571428571428571,\n", + " '#d93806'], [0.9285714285714286,\n", + " '#b11901'], [1.0, '#7a0402']]},\n", + " 'margin': {'t': 60},\n", + " 'template': '...',\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Auto-reshape based on data frequency\n", + "simple.statistics.plot.heatmap('Boiler(Heat)')" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "48", + "metadata": { + "ExecuteTime": { + "end_time": 
"2025-12-12T12:06:48.600387Z", + "start_time": "2025-12-12T12:06:47.811215Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 250.0 1.379e-14 ... 102.5 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': ('timeframe: %{x}
timestep: %' ... 'rge_state: %{z}'),\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('AAAAAABAb0DkBdNVug0PPZGJ+Pa5Lj' ... 'AAAAAAAADw5ELUzaBZQAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'ThermalStorage|charge_state'}},\n", + " 'colorscale': [[0.0, '#30123b'],\n", + " [0.07142857142857142, '#4145ab'],\n", + " [0.14285714285714285, '#4675ed'],\n", + " [0.21428571428571427, '#39a2fc'],\n", + " [0.2857142857142857, '#1bcfd4'],\n", + " [0.35714285714285715, '#24eca6'],\n", + " [0.42857142857142855, '#61fc6c'], [0.5,\n", + " '#a4fc3b'], [0.5714285714285714,\n", + " '#d1e834'], [0.6428571428571429,\n", + " '#f3c63a'], [0.7142857142857143,\n", + " '#fe9b2d'], [0.7857142857142857,\n", + " '#f36315'], [0.8571428571428571,\n", + " '#d93806'], [0.9285714285714286,\n", + " '#b11901'], [1.0, '#7a0402']]},\n", + " 'margin': {'t': 60},\n", + " 'template': '...',\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Storage charge state heatmap\n", + "simple.statistics.plot.heatmap('ThermalStorage')" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "49", + "metadata": { + "ExecuteTime": { + "end_time": 
"2025-12-12T12:06:49.215856Z", + "start_time": "2025-12-12T12:06:48.901232Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 32.48 27.28 31.72 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Office(Heat)|flow_rate: %{z}',\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('5ZuWpeU9QEDqSDirMEc7QB8FVNfUtz' ... 'AAAAAANECu5+If5Xs4QAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Office(Heat)|flow_rate'}},\n", + " 'colorscale': [[0.0, 'rgb(247,251,255)'], [0.125,\n", + " 'rgb(222,235,247)'], [0.25,\n", + " 'rgb(198,219,239)'], [0.375,\n", + " 'rgb(158,202,225)'], [0.5,\n", + " 'rgb(107,174,214)'], [0.625,\n", + " 'rgb(66,146,198)'], [0.75,\n", + " 'rgb(33,113,181)'], [0.875,\n", + " 'rgb(8,81,156)'], [1.0,\n", + " 'rgb(8,48,107)']]},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat Demand Pattern'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Custom colorscale\n", + "simple.statistics.plot.heatmap('Office(Heat)', color_continuous_scale='Blues', title='Heat Demand Pattern')" + ] + }, + { + "cell_type": "markdown", + "id": "50", + "metadata": {}, + "source": [ + "## 6. Sankey Diagrams\n", + "\n", + "Sankey diagrams visualize energy flows through the system." 
+ ] + }, + { + "cell_type": "markdown", + "id": "51", + "metadata": {}, + "source": [ + "### 6.1 Flow Sankey\n", + "\n", + "Total energy flows:" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "52", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:49.583991Z", + "start_time": "2025-12-12T12:06:49.299561Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 6)\n", + "Coordinates:\n", + " * link (link) int64 48B 0 1 2 3 4 5\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (link: 12)\n", + "Coordinates:\n", + " * link (link) int64 96B 0 1 2 3 4 5 6 7 8 9 10 11\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 0B\n", + "Dimensions: (link: 0)\n", + "Coordinates:\n", + " * link (link) float64 0B \n", + " source (link) float64 0B \n", + " target (link) float64 0B \n", + " label (link) float64 0B \n", + " carrier (link) float64 0B \n", + "Data variables:\n", + " value (link) float64 0B , figure=Figure({\n", + " 'data': [{'link': {'label': [], 'source': [], 'target': [], 'value': []},\n", + " 'node': {'color': [], 'label': [], 'line': {'color': 'black', 'width': 0.5}, 'pad': 15, 'thickness': 20},\n", + " 'type': 'sankey'}],\n", + " 'layout': {'template': '...', 'title': {'text': 'Investment Sizes (Capacities)'}}\n", + "}))" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "multiperiod.statistics.plot.sankey.sizes()" + ] + }, + { + "cell_type": "markdown", + "id": "56", + "metadata": {}, + "source": [ + "### 6.3 Peak Flow Sankey\n", + "\n", + "Maximum flow rates (peak power):" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "57", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:51.458035Z", + "start_time": "2025-12-12T12:06:51.237341Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 6)\n", + "Coordinates:\n", + " * link (link) int64 48B 0 1 2 3 4 5\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 184B\n", + "Dimensions: (link: 1)\n", + "Coordinates:\n", + " * link (link) int64 8B 0\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 488B\n", + "Dimensions: (link: 2)\n", + "Coordinates:\n", + " * link (link) int64 16B 0 1\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 5)\n", + "Coordinates:\n", + " * link (link) int64 40B 0 1 2 3 4\n", + " source (link) \n", + " window.PlotlyConfig = {MathJaxConfig: 'local'};\n", + " if (window.MathJax && window.MathJax.Hub && window.MathJax.Hub.Config) {window.MathJax.Hub.Config({SVG: {font: \"STIX-Web\"}});}\n", + " \n", + " \n", + " " + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "simple.topology.plot()" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "66", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:54.740689Z", + "start_time": "2025-12-12T12:06:54.669190Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "complex_sys.topology.plot(title='Complex System Topology')" + ] + }, + { + "cell_type": "markdown", + "id": "67", + "metadata": {}, + "source": [ + "### 7.2 Topology Info\n", + "\n", + "Get node and edge information programmatically:" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "68", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:54.957830Z", + "start_time": "2025-12-12T12:06:54.902442Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Nodes:\n", + " GasGrid: Component\n", + " Boiler: Component\n", + " ThermalStorage: Component\n", + " Office: Component\n", + " Gas: Bus\n", + " Heat: Bus\n", + "\n", + "Edges (flows):\n", + " Gas -> Boiler: Boiler(Gas)\n", + " Boiler -> Heat: Boiler(Heat)\n", + " GasGrid -> Gas: GasGrid(Gas)\n", + " Heat -> Office: Office(Heat)\n", + " Heat -> ThermalStorage: ThermalStorage(Charge)\n", + " ThermalStorage -> Heat: ThermalStorage(Discharge)\n" + ] + } + ], + "source": [ + "nodes, edges = simple.topology.infos()\n", + "\n", + "print('Nodes:')\n", + "for label, info in nodes.items():\n", + " print(f' {label}: {info[\"class\"]}')\n", + "\n", + "print('\\nEdges (flows):')\n", + "for label, info in edges.items():\n", + " print(f' {info[\"start\"]} -> {info[\"end\"]}: {label}')" + ] + }, + { + "cell_type": "markdown", + "id": "69", + "metadata": {}, + "source": [ + "## 8. 
Multi-Period/Scenario Data\n", + "\n", + "Working with multi-dimensional results:" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "70", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:55.088528Z", + "start_time": "2025-12-12T12:06:55.064186Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Multiperiod system dimensions:\n", + " Periods: [2024, 2025, 2026]\n", + " Scenarios: ['high_demand', 'low_demand']\n", + " Solution dims: {'scenario': 2, 'period': 3, 'time': 49}\n" + ] + } + ], + "source": [ + "print('Multiperiod system dimensions:')\n", + "print(f' Periods: {list(multiperiod.periods)}')\n", + "print(f' Scenarios: {list(multiperiod.scenarios)}')\n", + "print(f' Solution dims: {dict(multiperiod.solution.sizes)}')" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "71", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:57.157602Z", + "start_time": "2025-12-12T12:06:55.650661Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 10kB\n", + "Dimensions: (time: 49, period: 3, scenario: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 392B 2024-01-01 ... 2024...\n", + " * period (period) int64 24B 2024 2025 2026\n", + " * scenario (scenario) scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwFS20eeOpEfAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwGC20eeOpEfAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwGC20eeOpEfAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y3'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... 'JRKxBKPe7+DqGhoTi9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... '1P1R87PWP9SoFav0e9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': ('5ZuWpeU9RsDPqeLGgqdEwFTXQkqFnk' ... 'AAAAAAgGP9SoFav0e9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('EgPMGubHPsAnjH0z/HU4wAkgRYDluD' ... 'j1K22OPWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': ('EgPMGubHPsCfi30z/HU4wDogRYDluD' ... '2guF0/PWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': ('EgPMGubHPsCfi30z/HU4wDogRYDluD' ... 'DGEbwovWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y3'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... 'SjViA+vW73dwgNDQU9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... '6n6o9DvWP9SoFav0g9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAABHvWT9SoFavz' ... 'AAAAAAAGP9SoFav0g9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49CPQAAAAAAAD' ... 'j1K7WPvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': ('AAAAAAAAAABj/UqBWr9YvQAAAAAAAD' ... '2guF0xvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': ('AAAAAAAAAABj/UqBWr9YvQAAAAAAAD' ... '9y3IcWvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y3'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y3'}],\n", + " 'layout': {'annotations': [{'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2024',\n", + " 'x': 0.15666666666666665,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2025',\n", + " 'x': 0.49,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2026',\n", + " 'x': 0.8233333333333333,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'scenario=low_demand',\n", + " 'textangle': 90,\n", + " 'x': 0.98,\n", + " 'xanchor': 'left',\n", + " 'xref': 'paper',\n", + " 'y': 0.2425,\n", + " 'yanchor': 'middle',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'scenario=high_demand',\n", + " 'textangle': 90,\n", + " 'x': 0.98,\n", + " 'xanchor': 'left',\n", + " 'xref': 'paper',\n", + " 'y': 0.7575000000000001,\n", + " 'yanchor': 'middle',\n", + " 'yref': 'paper'}],\n", + " 'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 0.3133333333333333], 'title': {'text': 'time'}},\n", + " 'xaxis2': {'anchor': 'y2',\n", + " 'domain': 
[0.3333333333333333, 0.6466666666666666],\n", + " 'matches': 'x',\n", + " 'title': {'text': 'time'}},\n", + " 'xaxis3': {'anchor': 'y3', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'title': {'text': 'time'}},\n", + " 'xaxis4': {'anchor': 'y4', 'domain': [0.0, 0.3133333333333333], 'matches': 'x', 'showticklabels': False},\n", + " 'xaxis5': {'anchor': 'y5',\n", + " 'domain': [0.3333333333333333, 0.6466666666666666],\n", + " 'matches': 'x',\n", + " 'showticklabels': False},\n", + " 'xaxis6': {'anchor': 'y6', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'showticklabels': False},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 0.485], 'title': {'text': 'value'}},\n", + " 'yaxis2': {'anchor': 'x2', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis3': {'anchor': 'x3', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis4': {'anchor': 'x4', 'domain': [0.515, 1.0], 'matches': 'y', 'title': {'text': 'value'}},\n", + " 'yaxis5': {'anchor': 'x5', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis6': {'anchor': 'x6', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False}}\n", + "}))" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Balance plot with faceting by scenario\n", + "multiperiod.statistics.plot.balance('Heat')" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "72", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:57.734537Z", + "start_time": "2025-12-12T12:06:57.451036Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (time: 49)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 392B 2024-01-01 ... 2024...\n", + "Data variables:\n", + " Boiler(Heat) (time) float64 392B -0.0 -0.0 -0.0 ... -47.29 nan\n", + " ThermalStorage(Discharge) (time) float64 392B -44.48 -41.31 ... nan\n", + " ThermalStorage(Charge) (time) float64 392B 0.0 4.263e-14 ... nan\n", + " Building(Heat) (time) float64 392B 44.48 41.31 ... 47.29 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwFS20eeOpEfAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... 'JRKxBKPe7+DqGhoTi9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... 'SjViA+vW73dwgNDQU9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Building(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Building(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Building(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n", + " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n", + " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n", + " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n", + " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n", + " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n", + " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n", + " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n", + " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n", + " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n", + " '2024-01-02T20:00:00.000000000', 
'2024-01-02T21:00:00.000000000',\n", + " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", + " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Filter to specific scenario/period\n", + "multiperiod.statistics.plot.balance('Heat', select={'scenario': 'high_demand', 'period': 2024})" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "73", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:58.022014Z", + "start_time": "2025-12-12T12:06:57.778237Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 5)\n", + "Coordinates:\n", + " * link (link) int64 40B 0 1 2 3 4\n", + " source (link) \n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 7kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", + "Data variables:\n", + " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#66c2a5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#fc8d62', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#8da0cb', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Office(Heat)',\n", + " 'marker': {'color': '#e78ac3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Office(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Using a colorscale name\n", + "simple.statistics.plot.balance('Heat', colors='Set2')" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "76", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:59.181165Z", + "start_time": "2025-12-12T12:06:58.735466Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 7kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", + "Data variables:\n", + " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#e41a1c', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#377eb8', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#4daf4a', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Office(Heat)',\n", + " 'marker': {'color': '#984ea3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Office(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Using a list of colors\n", + "simple.statistics.plot.balance('Heat', colors=['#e41a1c', '#377eb8', '#4daf4a', '#984ea3'])" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "77", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:59.730556Z", + "start_time": "2025-12-12T12:06:59.234563Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 7kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", + "Data variables:\n", + " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': 'orangered', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': 'lightblue', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': 'steelblue', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Office(Heat)',\n", + " 'marker': {'color': 'forestgreen', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Office(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Using a dictionary for specific labels\n", + "simple.statistics.plot.balance(\n", + " 'Heat',\n", + " colors={\n", + " 'Boiler(Heat)': 'orangered',\n", + " 'ThermalStorage(Charge)': 'steelblue',\n", + " 'ThermalStorage(Discharge)': 'lightblue',\n", + " 'Office(Heat)': 'forestgreen',\n", + " },\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "78", + "metadata": {}, + "source": [ + "## 10. 
Exporting Results\n", + "\n", + "Plots return a `PlotResult` with data and figure that can be exported:" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "id": "79", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:07:00.118627Z", + "start_time": "2025-12-12T12:06:59.813869Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PlotResult contains:\n", + " data: Dataset with vars ['Boiler(Heat)', 'ThermalStorage(Discharge)', 'ThermalStorage(Charge)', 'Office(Heat)']\n", + " figure: Figure\n" + ] + } + ], + "source": [ + "# Get plot result\n", + "result = simple.statistics.plot.balance('Heat')\n", + "\n", + "print('PlotResult contains:')\n", + "print(f' data: {type(result.data).__name__} with vars {list(result.data.data_vars)}')\n", + "print(f' figure: {type(result.figure).__name__}')" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "80", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:07:00.477422Z", + "start_time": "2025-12-12T12:07:00.433079Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Boiler(Heat)ThermalStorage(Discharge)ThermalStorage(Charge)Office(Heat)
time
2024-01-15 00:00:00-32.483571-0.000000e+000.000000e+0032.483571
2024-01-15 01:00:00-29.3086785.275242e-13-3.747575e-1329.308678
2024-01-15 02:00:00-33.238443-7.086767e-138.792069e-1333.238443
2024-01-15 03:00:00-101.411593-3.516828e-136.379644e+0137.615149
2024-01-15 04:00:00-128.829233-5.613288e-131.000000e+0228.829233
\n", + "
" + ], + "text/plain": [ + " Boiler(Heat) ThermalStorage(Discharge) \\\n", + "time \n", + "2024-01-15 00:00:00 -32.483571 -0.000000e+00 \n", + "2024-01-15 01:00:00 -29.308678 5.275242e-13 \n", + "2024-01-15 02:00:00 -33.238443 -7.086767e-13 \n", + "2024-01-15 03:00:00 -101.411593 -3.516828e-13 \n", + "2024-01-15 04:00:00 -128.829233 -5.613288e-13 \n", + "\n", + " ThermalStorage(Charge) Office(Heat) \n", + "time \n", + "2024-01-15 00:00:00 0.000000e+00 32.483571 \n", + "2024-01-15 01:00:00 -3.747575e-13 29.308678 \n", + "2024-01-15 02:00:00 8.792069e-13 33.238443 \n", + "2024-01-15 03:00:00 6.379644e+01 37.615149 \n", + "2024-01-15 04:00:00 1.000000e+02 28.829233 " + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Export data to pandas DataFrame\n", + "df = result.data.to_dataframe()\n", + "df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "id": "81", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:07:00.727727Z", + "start_time": "2025-12-12T12:07:00.707525Z" + } + }, + "outputs": [], + "source": [ + "# Export figure to HTML (interactive)\n", + "# result.figure.write_html('balance_plot.html')\n", + "\n", + "# Export figure to image\n", + "# result.figure.write_image('balance_plot.png', scale=2)" + ] + }, + { + "cell_type": "markdown", + "id": "85", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "### Data Access\n", + "\n", + "| Property | Description |\n", + "|----------|-------------|\n", + "| `statistics.flow_rates` | Time series of flow rates (power) |\n", + "| `statistics.flow_hours` | Energy values (rate × duration) |\n", + "| `statistics.sizes` | Component/flow capacities |\n", + "| `statistics.charge_states` | Storage charge levels |\n", + "| `statistics.temporal_effects` | Effects per timestep |\n", + "| `statistics.periodic_effects` | Effects per period |\n", + "| `statistics.total_effects` | Aggregated effect totals |\n", + "| 
`topology.carrier_colors` | Cached carrier color mapping |\n", + "| `topology.component_colors` | Cached component color mapping |\n", + "| `topology.bus_colors` | Cached bus color mapping |\n", + "\n", + "### Plot Methods\n", + "\n", + "| Method | Description |\n", + "|--------|-------------|\n", + "| `plot.balance(node)` | Stacked bar of in/outflows |\n", + "| `plot.carrier_balance(carrier)` | Balance for all flows of a carrier |\n", + "| `plot.flows(variables)` | Time series line/area plot |\n", + "| `plot.storage(component)` | Combined charge state and flows |\n", + "| `plot.charge_states(component)` | Charge state time series |\n", + "| `plot.sizes()` | Bar chart of sizes |\n", + "| `plot.effects(effect)` | Bar chart of effect contributions |\n", + "| `plot.duration_curve(variables)` | Sorted duration curve |\n", + "| `plot.heatmap(variable)` | 2D time-reshaped heatmap |\n", + "| `plot.sankey.flows()` | Energy flow Sankey |\n", + "| `plot.sankey.sizes()` | Capacity Sankey |\n", + "| `plot.sankey.peak_flow()` | Peak power Sankey |\n", + "| `plot.sankey.effects(effect)` | Effect allocation Sankey |\n", + "| `topology.plot()` | System structure diagram |" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/data/generate_example_systems.py b/docs/notebooks/data/generate_example_systems.py new file mode 100644 index 000000000..a46ed59dd --- /dev/null +++ b/docs/notebooks/data/generate_example_systems.py @@ -0,0 +1,333 @@ +"""Generate example FlowSystem files for the plotting notebook. + +This script creates three FlowSystems of varying complexity: +1. simple_system - Basic heat system (boiler + storage + sink) +2. complex_system - Multi-carrier with multiple effects and piecewise efficiency +3. 
multiperiod_system - System with periods and scenarios + +Run this script to regenerate the example data files. +""" + +from pathlib import Path + +import numpy as np +import pandas as pd + +import flixopt as fx + +# Output directory (same as this script) +try: + OUTPUT_DIR = Path(__file__).parent +except NameError: + # Running in notebook context (e.g., mkdocs-jupyter) + OUTPUT_DIR = Path('docs/notebooks/data') + + +def create_simple_system() -> fx.FlowSystem: + """Create a simple heat system with boiler, storage, and demand. + + Components: + - Gas boiler (150 kW) + - Thermal storage (500 kWh) + - Office heat demand + + One week, hourly resolution. + """ + # One week, hourly + timesteps = pd.date_range('2024-01-15', periods=168, freq='h') + + # Create demand pattern + hours = np.arange(168) + hour_of_day = hours % 24 + day_of_week = (hours // 24) % 7 + + base_demand = np.where((hour_of_day >= 7) & (hour_of_day <= 18), 80, 30) + weekend_factor = np.where(day_of_week >= 5, 0.5, 1.0) + + np.random.seed(42) + heat_demand = base_demand * weekend_factor + np.random.normal(0, 5, len(hours)) + heat_demand = np.clip(heat_demand, 20, 100) + + # Time-varying gas price + gas_price = np.where((hour_of_day >= 6) & (hour_of_day <= 22), 0.08, 0.05) + + fs = fx.FlowSystem(timesteps) + fs.add_elements( + fx.Bus('Gas', carrier='gas'), + fx.Bus('Heat', carrier='heat'), + fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True), + fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)]), + fx.linear_converters.Boiler( + 'Boiler', + thermal_efficiency=0.92, + thermal_flow=fx.Flow('Heat', bus='Heat', size=150), + fuel_flow=fx.Flow('Gas', bus='Gas'), + ), + fx.Storage( + 'ThermalStorage', + capacity_in_flow_hours=500, + initial_charge_state=250, + minimal_final_charge_state=200, + eta_charge=0.98, + eta_discharge=0.98, + relative_loss_per_hour=0.005, + charging=fx.Flow('Charge', bus='Heat', size=100), + 
discharging=fx.Flow('Discharge', bus='Heat', size=100), + ), + fx.Sink('Office', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]), + ) + return fs + + +def create_complex_system() -> fx.FlowSystem: + """Create a complex multi-carrier system with multiple effects. + + Components: + - Gas grid (with CO2 emissions) + - Electricity grid (with time-varying price and CO2) + - CHP with piecewise efficiency + - Heat pump + - Gas boiler (backup) + - Thermal storage + - Heat demand + + Effects: costs (objective), CO2 + + Three days, hourly resolution. + """ + timesteps = pd.date_range('2024-06-01', periods=72, freq='h') + hours = np.arange(72) + hour_of_day = hours % 24 + + # Demand profiles + np.random.seed(123) + heat_demand = 50 + 30 * np.sin(2 * np.pi * hour_of_day / 24 - np.pi / 2) + np.random.normal(0, 5, 72) + heat_demand = np.clip(heat_demand, 20, 100) + + electricity_demand = 20 + 15 * np.sin(2 * np.pi * hour_of_day / 24) + np.random.normal(0, 3, 72) + electricity_demand = np.clip(electricity_demand, 10, 50) + + # Price profiles + electricity_price = np.where((hour_of_day >= 8) & (hour_of_day <= 20), 0.25, 0.12) + gas_price = 0.06 + + # CO2 factors (kg/kWh) + electricity_co2 = np.where((hour_of_day >= 8) & (hour_of_day <= 20), 0.4, 0.3) # Higher during peak + gas_co2 = 0.2 + + fs = fx.FlowSystem(timesteps) + fs.add_elements( + # Buses + fx.Bus('Gas', carrier='gas'), + fx.Bus('Electricity', carrier='electricity'), + fx.Bus('Heat', carrier='heat'), + # Effects + fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True), + fx.Effect('CO2', 'kg', 'CO2 Emissions'), + # Gas supply + fx.Source( + 'GasGrid', + outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})], + ), + # Electricity grid (import and export) + fx.Source( + 'ElectricityImport', + outputs=[ + fx.Flow( + 'El', + bus='Electricity', + size=100, + effects_per_flow_hour={'costs': electricity_price, 'CO2': 
electricity_co2}, + ) + ], + ), + fx.Sink( + 'ElectricityExport', + inputs=[ + fx.Flow('El', bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8}) + ], + ), + # CHP with piecewise efficiency (efficiency varies with load) + fx.LinearConverter( + 'CHP', + inputs=[fx.Flow('Gas', bus='Gas', size=200)], + outputs=[fx.Flow('El', bus='Electricity', size=80), fx.Flow('Heat', bus='Heat', size=85)], + piecewise_conversion=fx.PiecewiseConversion( + { + 'Gas': fx.Piecewise( + [ + fx.Piece(start=80, end=160), # Part load + fx.Piece(start=160, end=200), # Full load + ] + ), + 'El': fx.Piecewise( + [ + fx.Piece(start=25, end=60), # ~31-38% electrical efficiency + fx.Piece(start=60, end=80), # ~38-40% electrical efficiency + ] + ), + 'Heat': fx.Piecewise( + [ + fx.Piece(start=35, end=70), # ~44% thermal efficiency + fx.Piece(start=70, end=85), # ~43% thermal efficiency + ] + ), + } + ), + status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 2}), + ), + # Heat pump (with investment) + fx.linear_converters.HeatPump( + 'HeatPump', + thermal_flow=fx.Flow( + 'Heat', + bus='Heat', + size=fx.InvestParameters( + effects_of_investment={'costs': 500}, + effects_of_investment_per_size={'costs': 100}, + maximum_size=60, + ), + ), + electrical_flow=fx.Flow('El', bus='Electricity'), + cop=3.5, + ), + # Backup boiler + fx.linear_converters.Boiler( + 'BackupBoiler', + thermal_flow=fx.Flow('Heat', bus='Heat', size=80), + fuel_flow=fx.Flow('Gas', bus='Gas'), + thermal_efficiency=0.90, + ), + # Thermal storage (with investment) + fx.Storage( + 'HeatStorage', + capacity_in_flow_hours=fx.InvestParameters( + effects_of_investment={'costs': 200}, + effects_of_investment_per_size={'costs': 10}, + maximum_size=300, + ), + eta_charge=0.95, + eta_discharge=0.95, + charging=fx.Flow('Charge', bus='Heat', size=50), + discharging=fx.Flow('Discharge', bus='Heat', size=50), + ), + # Demands + fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, 
fixed_relative_profile=heat_demand)]), + fx.Sink( + 'ElDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)] + ), + ) + return fs + + +def create_multiperiod_system() -> fx.FlowSystem: + """Create a system with multiple periods and scenarios. + + Same structure as simple system but with: + - 3 planning periods (years 2024, 2025, 2026) + - 2 scenarios (high demand, low demand) + + Each period: 48 hours (2 days representative) + """ + timesteps = pd.date_range('2024-01-01', periods=48, freq='h') + hour_of_day = np.arange(48) % 24 + + # Period definitions (years) + periods = pd.Index([2024, 2025, 2026], name='period') + + # Scenario definitions + scenarios = pd.Index(['high_demand', 'low_demand'], name='scenario') + scenario_weights = np.array([0.3, 0.7]) + + # Base demand pattern (hourly) + base_pattern = np.where((hour_of_day >= 7) & (hour_of_day <= 18), 80.0, 35.0) + + # Scenario-specific scaling + np.random.seed(42) + high_demand = base_pattern * 1.2 + np.random.normal(0, 5, 48) + low_demand = base_pattern * 0.85 + np.random.normal(0, 3, 48) + + # Create DataFrame with scenario columns + heat_demand = pd.DataFrame( + { + 'high_demand': np.clip(high_demand, 20, 120), + 'low_demand': np.clip(low_demand, 15, 90), + }, + index=timesteps, + ) + + # Gas price varies by period (rising costs) + gas_prices = np.array([0.06, 0.08, 0.10]) # Per period + + fs = fx.FlowSystem( + timesteps, + periods=periods, + scenarios=scenarios, + scenario_weights=scenario_weights, + ) + + fs.add_elements( + fx.Bus('Gas', carrier='gas'), + fx.Bus('Heat', carrier='heat'), + fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True), + fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_prices)]), + fx.linear_converters.Boiler( + 'Boiler', + thermal_efficiency=0.92, + thermal_flow=fx.Flow( + 'Heat', + bus='Heat', + size=fx.InvestParameters( + effects_of_investment={'costs': 1000}, + 
effects_of_investment_per_size={'costs': 50}, + maximum_size=250, + ), + ), + fuel_flow=fx.Flow('Gas', bus='Gas'), + ), + fx.Storage( + 'ThermalStorage', + capacity_in_flow_hours=fx.InvestParameters( + effects_of_investment={'costs': 500}, + effects_of_investment_per_size={'costs': 15}, + maximum_size=400, + ), + eta_charge=0.98, + eta_discharge=0.98, + charging=fx.Flow('Charge', bus='Heat', size=80), + discharging=fx.Flow('Discharge', bus='Heat', size=80), + ), + fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]), + ) + return fs + + +def main(): + """Generate all example systems and save to netCDF.""" + solver = fx.solvers.HighsSolver(log_to_console=False) + + systems = [ + ('simple_system', create_simple_system), + ('complex_system', create_complex_system), + ('multiperiod_system', create_multiperiod_system), + ] + + for name, create_func in systems: + print(f'Creating {name}...') + fs = create_func() + + print(' Optimizing...') + fs.optimize(solver) + + output_path = OUTPUT_DIR / f'{name}.nc4' + print(f' Saving to {output_path}...') + fs.to_netcdf(output_path, overwrite=True) + + print(f' Done. Objective: {fs.solution["objective"].item():.2f}') + print() + + print('All systems generated successfully!') + + +if __name__ == '__main__': + main() diff --git a/docs/notebooks/index.md b/docs/notebooks/index.md index b8003f6bb..210aee051 100644 --- a/docs/notebooks/index.md +++ b/docs/notebooks/index.md @@ -2,43 +2,50 @@ Learn flixopt through practical examples organized by topic. Each notebook includes a real-world user story and progressively builds your understanding. 
-## Getting Started +## Basics | Notebook | Description | |----------|-------------| | [01-Quickstart](01-quickstart.ipynb) | Minimal working example - heat a workshop with a gas boiler | | [02-Heat System](02-heat-system.ipynb) | District heating with thermal storage and time-varying prices | -## Investment & Planning +## Investment | Notebook | Description | |----------|-------------| -| [03-Investment Optimization](03-investment-optimization.ipynb) | Size a solar heating system - let the optimizer decide equipment sizes | -| [04-Operational Constraints](04-operational-constraints.ipynb) | Industrial boiler with startup costs, minimum uptime, and load constraints | +| [03-Sizing](03-investment-optimization.ipynb) | Size a solar heating system - let the optimizer decide equipment sizes | +| [04-Constraints](04-operational-constraints.ipynb) | Industrial boiler with startup costs, minimum uptime, and load constraints | -## Advanced Modeling +## Advanced | Notebook | Description | |----------|-------------| -| [05-Multi-Carrier Systems](05-multi-carrier-system.ipynb) | Hospital with CHP producing both electricity and heat | -| [06-Piecewise Efficiency](06-piecewise-efficiency.ipynb) | Heat pump with temperature-dependent COP and part-load curves | +| [05-Multi-Carrier](05-multi-carrier-system.ipynb) | Hospital with CHP producing both electricity and heat | +| [06-Piecewise](06-piecewise-efficiency.ipynb) | Heat pump with temperature-dependent COP and part-load curves | -## Scenarios & Scaling +## Scaling | Notebook | Description | |----------|-------------| -| [07-Scenarios and Periods](07-scenarios-and-periods.ipynb) | Multi-year planning with uncertain demand scenarios | -| [08-Large-Scale Optimization](08-large-scale-optimization.ipynb) | Speed up large problems with resampling and two-stage optimization | +| [07-Scenarios](07-scenarios-and-periods.ipynb) | Multi-year planning with uncertain demand scenarios | +| 
[08-Large-Scale](08-large-scale-optimization.ipynb) | Speed up large problems with resampling and two-stage optimization | -## Key Concepts by Notebook +## Results + +| Notebook | Description | +|----------|-------------| +| [09-Plotting](09-plotting-and-data-access.ipynb) | Access optimization results and create visualizations | + +## Key Concepts | Concept | Introduced In | |---------|---------------| -| `FlowSystem`, `Bus`, `Flow` | 01-Quickstart | -| `Storage`, time-varying prices | 02-Heat System | -| `InvestParameters`, optimal sizing | 03-Investment | -| `StatusParameters`, startup costs | 04-Operational | -| Multi-carrier, CHP | 05-Multi-Carrier | -| `Piecewise`, variable efficiency | 06-Piecewise | -| Periods, scenarios, weights | 07-Scenarios | -| `transform.resample()`, `fix_sizes()` | 08-Large-Scale | +| `FlowSystem`, `Bus`, `Flow` | Quickstart | +| `Storage`, time-varying prices | Heat System | +| `InvestParameters`, optimal sizing | Sizing | +| `StatusParameters`, startup costs | Constraints | +| Multi-carrier, CHP | Multi-Carrier | +| `Piecewise`, variable efficiency | Piecewise | +| Periods, scenarios, weights | Scenarios | +| `transform.resample()`, `fix_sizes()` | Large-Scale | +| `statistics`, `topology`, plotting | Plotting | diff --git a/flixopt/components.py b/flixopt/components.py index c5913c8e2..267c144af 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -265,7 +265,9 @@ class Storage(Component): charging: Incoming flow for loading the storage. discharging: Outgoing flow for unloading the storage. capacity_in_flow_hours: Storage capacity in flow-hours (kWh, m³, kg). - Scalar for fixed size or InvestParameters for optimization. + Scalar for fixed size, InvestParameters for optimization, or None (unbounded). + Default: None (unbounded capacity). When using InvestParameters, + maximum_size (or fixed_size) must be explicitly set for proper model scaling. relative_minimum_charge_state: Minimum charge state (0-1). Default: 0. 
relative_maximum_charge_state: Maximum charge state (0-1). Default: 1. initial_charge_state: Charge at start. Numeric or 'equals_final'. Default: 0. @@ -366,6 +368,11 @@ class Storage(Component): variables enforce mutual exclusivity, increasing solution time but preventing unrealistic simultaneous charging and discharging. + **Unbounded capacity**: When capacity_in_flow_hours is None (default), the storage has + unlimited capacity. Note that prevent_simultaneous_charge_and_discharge requires the + charging and discharging flows to have explicit sizes. Use prevent_simultaneous_charge_and_discharge=False + with unbounded storages, or set flow sizes explicitly. + **Units**: Flow rates and charge states are related by the concept of 'flow hours' (=flow_rate * time). With flow rates in kW, the charge state is therefore (usually) kWh. With flow rates in m3/h, the charge state is therefore in m3. @@ -378,7 +385,7 @@ def __init__( label: str, charging: Flow, discharging: Flow, - capacity_in_flow_hours: Numeric_PS | InvestParameters, + capacity_in_flow_hours: Numeric_PS | InvestParameters | None = None, relative_minimum_charge_state: Numeric_TPS = 0, relative_maximum_charge_state: Numeric_TPS = 1, initial_charge_state: Numeric_PS | Literal['equals_final'] = 0, @@ -485,31 +492,58 @@ def _plausibility_checks(self) -> None: raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}') initial_equals_final = True - # Use new InvestParameters methods to get capacity bounds - if isinstance(self.capacity_in_flow_hours, InvestParameters): - minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size - maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size - else: - maximum_capacity = self.capacity_in_flow_hours - minimum_capacity = self.capacity_in_flow_hours - - # Initial capacity should not constraint investment decision - minimum_initial_capacity = maximum_capacity * self.relative_minimum_charge_state.isel(time=0) - 
maximum_initial_capacity = minimum_capacity * self.relative_maximum_charge_state.isel(time=0) - - # Only perform numeric comparisons if not using 'equals_final' - if not initial_equals_final: - if (self.initial_charge_state > maximum_initial_capacity).any(): + # Capacity is required when using non-default relative bounds + if self.capacity_in_flow_hours is None: + if np.any(self.relative_minimum_charge_state > 0): + raise PlausibilityError( + f'Storage "{self.label_full}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. ' + f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.' + ) + if np.any(self.relative_maximum_charge_state < 1): + raise PlausibilityError( + f'Storage "{self.label_full}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. ' + f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.' + ) + if self.relative_minimum_final_charge_state is not None: raise PlausibilityError( - f'{self.label_full}: {self.initial_charge_state=} ' - f'is constraining the investment decision. Chosse a value above {maximum_initial_capacity}' + f'Storage "{self.label_full}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. ' + f'A capacity is required for relative final charge state constraints.' ) - if (self.initial_charge_state < minimum_initial_capacity).any(): + if self.relative_maximum_final_charge_state is not None: raise PlausibilityError( - f'{self.label_full}: {self.initial_charge_state=} ' - f'is constraining the investment decision. Chosse a value below {minimum_initial_capacity}' + f'Storage "{self.label_full}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. ' + f'A capacity is required for relative final charge state constraints.' 
) + # Skip capacity-related checks if capacity is None (unbounded) + if self.capacity_in_flow_hours is not None: + # Use new InvestParameters methods to get capacity bounds + if isinstance(self.capacity_in_flow_hours, InvestParameters): + minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size + maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size + else: + maximum_capacity = self.capacity_in_flow_hours + minimum_capacity = self.capacity_in_flow_hours + + # Initial charge state should not constrain investment decision + # If initial > (min_cap * rel_max), investment is forced to increase capacity + # If initial < (max_cap * rel_min), investment is forced to decrease capacity + min_initial_at_max_capacity = maximum_capacity * self.relative_minimum_charge_state.isel(time=0) + max_initial_at_min_capacity = minimum_capacity * self.relative_maximum_charge_state.isel(time=0) + + # Only perform numeric comparisons if not using 'equals_final' + if not initial_equals_final: + if (self.initial_charge_state > max_initial_at_min_capacity).any(): + raise PlausibilityError( + f'{self.label_full}: {self.initial_charge_state=} ' + f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.' + ) + if (self.initial_charge_state < min_initial_at_max_capacity).any(): + raise PlausibilityError( + f'{self.label_full}: {self.initial_charge_state=} ' + f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.' + ) + if self.balanced: if not isinstance(self.charging.size, InvestParameters) or not isinstance( self.discharging.size, InvestParameters @@ -518,13 +552,13 @@ def _plausibility_checks(self) -> None: f'Balancing charging and discharging Flows in {self.label_full} is only possible with Investments.' 
) - if (self.charging.size.minimum_size > self.discharging.size.maximum_size).any() or ( - self.charging.size.maximum_size < self.discharging.size.minimum_size + if (self.charging.size.minimum_or_fixed_size > self.discharging.size.maximum_or_fixed_size).any() or ( + self.charging.size.maximum_or_fixed_size < self.discharging.size.minimum_or_fixed_size ).any(): raise PlausibilityError( f'Balancing charging and discharging Flows in {self.label_full} need compatible minimum and maximum sizes.' - f'Got: {self.charging.size.minimum_size=}, {self.charging.size.maximum_size=} and ' - f'{self.discharging.size.minimum_size=}, {self.discharging.size.maximum_size=}.' + f'Got: {self.charging.size.minimum_or_fixed_size=}, {self.charging.size.maximum_or_fixed_size=} and ' + f'{self.discharging.size.minimum_or_fixed_size=}, {self.discharging.size.maximum_or_fixed_size=}.' ) def __repr__(self) -> str: @@ -705,8 +739,8 @@ def _plausibility_checks(self): ).any(): raise ValueError( f'Balanced Transmission needs compatible minimum and maximum sizes.' - f'Got: {self.in1.size.minimum_size=}, {self.in1.size.maximum_size=}, {self.in1.size.fixed_size=} and ' - f'{self.in2.size.minimum_size=}, {self.in2.size.maximum_size=}, {self.in2.size.fixed_size=}.' + f'Got: {self.in1.size.minimum_or_fixed_size=}, {self.in1.size.maximum_or_fixed_size=} and ' + f'{self.in2.size.minimum_or_fixed_size=}, {self.in2.size.maximum_or_fixed_size=}.' 
) def create_model(self, model) -> TransmissionModel: @@ -938,15 +972,18 @@ def _initial_and_final_charge_state(self): @property def _absolute_charge_state_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: relative_lower_bound, relative_upper_bound = self._relative_charge_state_bounds - if not isinstance(self.element.capacity_in_flow_hours, InvestParameters): + if self.element.capacity_in_flow_hours is None: + # Unbounded storage: lower bound is 0, upper bound is infinite + return (0, np.inf) + elif isinstance(self.element.capacity_in_flow_hours, InvestParameters): return ( - relative_lower_bound * self.element.capacity_in_flow_hours, - relative_upper_bound * self.element.capacity_in_flow_hours, + relative_lower_bound * self.element.capacity_in_flow_hours.minimum_or_fixed_size, + relative_upper_bound * self.element.capacity_in_flow_hours.maximum_or_fixed_size, ) else: return ( - relative_lower_bound * self.element.capacity_in_flow_hours.minimum_size, - relative_upper_bound * self.element.capacity_in_flow_hours.maximum_size, + relative_lower_bound * self.element.capacity_in_flow_hours, + relative_upper_bound * self.element.capacity_in_flow_hours, ) @property diff --git a/flixopt/elements.py b/flixopt/elements.py index d97ff3ffb..2933eb95a 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -138,6 +138,17 @@ def _check_unique_flow_labels(self): def _plausibility_checks(self) -> None: self._check_unique_flow_labels() + # Component with status_parameters requires all flows to have sizes set + # (status_parameters are propagated to flows in _do_modeling, which need sizes for big-M constraints) + if self.status_parameters is not None: + flows_without_size = [flow.label for flow in self.inputs + self.outputs if flow.size is None] + if flows_without_size: + raise PlausibilityError( + f'Component "{self.label_full}" has status_parameters, but the following flows have no size: ' + f'{flows_without_size}. 
All flows need explicit sizes when the component uses status_parameters ' + f'(required for big-M constraints).' + ) + def _connect_flows(self): # Inputs for flow in self.inputs: @@ -344,7 +355,7 @@ class Flow(Element): Args: label: Unique flow identifier within its component. bus: Bus label this flow connects to. - size: Flow capacity. Scalar, InvestParameters, or None (uses CONFIG.Modeling.big). + size: Flow capacity. Scalar, InvestParameters, or None (unbounded). relative_minimum: Minimum flow rate as fraction of size (0-1). Default: 0. relative_maximum: Maximum flow rate as fraction of size. Default: 1. load_factor_min: Minimum average utilization (0-1). Default: 0. @@ -445,7 +456,8 @@ class Flow(Element): `relative_maximum` for upper bounds on optimization variables. Notes: - - Default size (CONFIG.Modeling.big) is used when size=None + - size=None means unbounded (no capacity constraint) + - size must be set when using status_parameters or fixed_relative_profile - list inputs for previous_flow_rate are converted to NumPy arrays - Flow direction is determined by component input/output designation @@ -460,7 +472,7 @@ def __init__( self, label: str, bus: str, - size: Numeric_PS | InvestParameters = None, + size: Numeric_PS | InvestParameters | None = None, fixed_relative_profile: Numeric_TPS | None = None, relative_minimum: Numeric_TPS = 0, relative_maximum: Numeric_TPS = 1, @@ -476,7 +488,7 @@ def __init__( meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) - self.size = CONFIG.Modeling.big if size is None else size + self.size = size self.relative_minimum = relative_minimum self.relative_maximum = relative_maximum self.fixed_relative_profile = fixed_relative_profile @@ -549,7 +561,7 @@ def transform_data(self) -> None: self.status_parameters.transform_data() if isinstance(self.size, InvestParameters): self.size.transform_data() - else: + elif self.size is not None: self.size = self._fit_coords(f'{self.prefix}|size', self.size, 
dims=['period', 'scenario']) def _plausibility_checks(self) -> None: @@ -557,13 +569,43 @@ def _plausibility_checks(self) -> None: if (self.relative_minimum > self.relative_maximum).any(): raise PlausibilityError(self.label_full + ': Take care, that relative_minimum <= relative_maximum!') - if not isinstance(self.size, InvestParameters) and ( - np.any(self.size == CONFIG.Modeling.big) and self.fixed_relative_profile is not None - ): # Default Size --> Most likely by accident - logger.warning( - f'Flow "{self.label_full}" has no size assigned, but a "fixed_relative_profile". ' - f'The default size is {CONFIG.Modeling.big}. As "flow_rate = size * fixed_relative_profile", ' - f'the resulting flow_rate will be very high. To fix this, assign a size to the Flow {self}.' + # Size is required when using StatusParameters (for big-M constraints) + if self.status_parameters is not None and self.size is None: + raise PlausibilityError( + f'Flow "{self.label_full}" has status_parameters but no size defined. ' + f'A size is required when using status_parameters to bound the flow rate.' + ) + + if self.size is None and self.fixed_relative_profile is not None: + raise PlausibilityError( + f'Flow "{self.label_full}" has a fixed_relative_profile but no size defined. ' + f'A size is required because flow_rate = size * fixed_relative_profile.' + ) + + # Size is required when using non-default relative bounds (flow_rate = size * relative_bound) + if self.size is None and np.any(self.relative_minimum > 0): + raise PlausibilityError( + f'Flow "{self.label_full}" has relative_minimum > 0 but no size defined. ' + f'A size is required because the lower bound is size * relative_minimum.' + ) + + if self.size is None and np.any(self.relative_maximum < 1): + raise PlausibilityError( + f'Flow "{self.label_full}" has relative_maximum != 1 but no size defined. ' + f'A size is required because the upper bound is size * relative_maximum.' 
+ ) + + # Size is required for load factor constraints (total_flow_hours / size) + if self.size is None and self.load_factor_min is not None: + raise PlausibilityError( + f'Flow "{self.label_full}" has load_factor_min but no size defined. ' + f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.' + ) + + if self.size is None and self.load_factor_max is not None: + raise PlausibilityError( + f'Flow "{self.label_full}" has load_factor_max but no size defined. ' + f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.' ) if self.fixed_relative_profile is not None and self.status_parameters is not None: @@ -829,15 +871,18 @@ def absolute_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: if not self.with_status: if not self.with_investment: # Basic case without investment and without Status - lb = lb_relative * self.element.size + if self.element.size is not None: + lb = lb_relative * self.element.size elif self.with_investment and self.element.size.mandatory: # With mandatory Investment lb = lb_relative * self.element.size.minimum_or_fixed_size if self.with_investment: ub = ub_relative * self.element.size.maximum_or_fixed_size - else: + elif self.element.size is not None: ub = ub_relative * self.element.size + else: + ub = np.inf # Unbounded when size is None return lb, ub diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index fb37fb002..1e0503e52 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -711,6 +711,9 @@ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem: carrier = cls._resolve_reference_structure(carrier_data, {}) flow_system._carriers.add(carrier) + # Reconnect network to populate bus inputs/outputs (not stored in NetCDF). 
+ flow_system.connect_and_transform() + return flow_system def to_netcdf(self, path: str | pathlib.Path, compression: int = 5, overwrite: bool = False): @@ -1080,6 +1083,8 @@ def add_elements(self, *elements: Element) -> None: 'Adding elements to a FlowSystem with an existing model. The model will be invalidated.', stacklevel=2, ) + # Always invalidate when adding elements to ensure new elements get transformed + if self.model is not None or self._connected_and_transformed: self._invalidate_model() for new_element in list(elements): @@ -1146,6 +1151,8 @@ def add_carriers(self, *carriers: Carrier) -> None: 'Adding carriers to a FlowSystem with an existing model. The model will be invalidated.', stacklevel=2, ) + # Always invalidate when adding carriers to ensure proper re-transformation + if self.model is not None or self._connected_and_transformed: self._invalidate_model() for carrier in list(carriers): diff --git a/flixopt/interface.py b/flixopt/interface.py index 0a9e9424c..b11576ef1 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -882,7 +882,7 @@ class InvestParameters(Interface): fixed_size: Creates binary decision at this exact size. None allows continuous sizing. minimum_size: Lower bound for continuous sizing. Default: CONFIG.Modeling.epsilon. Ignored if fixed_size is specified. - maximum_size: Upper bound for continuous sizing. Default: CONFIG.Modeling.big. + maximum_size: Upper bound for continuous sizing. Required if fixed_size is not set. Ignored if fixed_size is specified. mandatory: Controls whether investment is required. When True, forces investment to occur (useful for mandatory upgrades or replacement decisions). 
@@ -1065,7 +1065,7 @@ def __init__( ) self.piecewise_effects_of_investment = piecewise_effects_of_investment self.minimum_size = minimum_size if minimum_size is not None else CONFIG.Modeling.epsilon - self.maximum_size = maximum_size if maximum_size is not None else CONFIG.Modeling.big # default maximum + self.maximum_size = maximum_size self.linked_periods = linked_periods def link_to_flow_system(self, flow_system, prefix: str = '') -> None: @@ -1075,6 +1075,12 @@ def link_to_flow_system(self, flow_system, prefix: str = '') -> None: self.piecewise_effects_of_investment.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseEffects')) def transform_data(self) -> None: + # Validate that either fixed_size or maximum_size is set + if self.fixed_size is None and self.maximum_size is None: + raise ValueError( + f'InvestParameters in "{self.prefix}" requires either fixed_size or maximum_size to be set. ' + f'An upper bound is needed to properly scale the optimization model.' + ) self.effects_of_investment = self._fit_effect_coords( prefix=self.prefix, effect_values=self.effects_of_investment, diff --git a/flixopt/io.py b/flixopt/io.py index 38c3e6286..f46cd8723 100644 --- a/flixopt/io.py +++ b/flixopt/io.py @@ -1031,7 +1031,7 @@ def build_repr_from_init( excluded_params: Set of parameter names to exclude (e.g., {'self', 'inputs', 'outputs'}) Default excludes 'self', 'label', and 'kwargs' label_as_positional: If True and 'label' param exists, show it as first positional arg - skip_default_size: If True, skip 'size' parameter when it equals CONFIG.Modeling.big + skip_default_size: Deprecated. Previously skipped size=CONFIG.Modeling.big, now size=None is default. 
Returns: Formatted repr string like: ClassName("label", param=value) diff --git a/flixopt/structure.py b/flixopt/structure.py index 590fe8ba0..88fd9ce31 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -322,6 +322,11 @@ class Interface: transform_data(): Transform data to match FlowSystem dimensions """ + # Class-level defaults for attributes set by link_to_flow_system() + # These provide type hints and default values without requiring __init__ in subclasses + _flow_system: FlowSystem | None = None + _prefix: str = '' + def transform_data(self) -> None: """Transform the data of the interface to match the FlowSystem's dimensions. @@ -340,7 +345,7 @@ def transform_data(self) -> None: @property def prefix(self) -> str: """The prefix used for naming transformed data (e.g., 'Boiler(Q_th)|status_parameters').""" - return getattr(self, '_prefix', '') + return self._prefix def _sub_prefix(self, name: str) -> str: """Build a prefix for a nested interface by appending name to current prefix.""" @@ -399,7 +404,7 @@ def flow_system(self) -> FlowSystem: For Elements, this is set during add_elements(). For parameter classes, this is set recursively when the parent Element is registered. """ - if not hasattr(self, '_flow_system') or self._flow_system is None: + if self._flow_system is None: raise RuntimeError( f'{self.__class__.__name__} is not linked to a FlowSystem. ' f'Ensure the parent element is registered via flow_system.add_elements() first.' 
diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py index 45a33c712..eaec1a3b6 100644 --- a/flixopt/transform_accessor.py +++ b/flixopt/transform_accessor.py @@ -289,6 +289,7 @@ def resample( method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean', hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, + fill_gaps: Literal['ffill', 'bfill', 'interpolate'] | None = None, **kwargs: Any, ) -> FlowSystem: """ @@ -304,11 +305,18 @@ def resample( If None, computed from the last time interval. hours_of_previous_timesteps: Duration of previous timesteps after resampling. If None, computed from the first time interval. Can be a scalar or array. + fill_gaps: Strategy for filling gaps (NaN values) that arise when resampling + irregular timesteps to regular intervals. Options: 'ffill' (forward fill), + 'bfill' (backward fill), 'interpolate' (linear interpolation). + If None (default), raises an error when gaps are detected. **kwargs: Additional arguments passed to xarray.resample() Returns: FlowSystem: New resampled FlowSystem (no solution). + Raises: + ValueError: If resampling creates gaps and fill_gaps is not specified. 
+ Examples: >>> # Resample to 4-hour intervals >>> fs_4h = flow_system.transform.resample(time='4h', method='mean') @@ -329,6 +337,7 @@ def resample( method=method, hours_of_last_timestep=hours_of_last_timestep, hours_of_previous_timesteps=hours_of_previous_timesteps, + fill_gaps=fill_gaps, **kwargs, ) return FlowSystem.from_dataset(ds) # from_dataset doesn't include solution @@ -443,6 +452,7 @@ def _dataset_resample( method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean', hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, + fill_gaps: Literal['ffill', 'bfill', 'interpolate'] | None = None, **kwargs: Any, ) -> xr.Dataset: """ @@ -454,10 +464,17 @@ def _dataset_resample( method: Resampling method (e.g., 'mean', 'sum', 'first') hours_of_last_timestep: Duration of the last timestep after resampling. hours_of_previous_timesteps: Duration of previous timesteps after resampling. + fill_gaps: Strategy for filling gaps (NaN values) that arise when resampling + irregular timesteps to regular intervals. Options: 'ffill' (forward fill), + 'bfill' (backward fill), 'interpolate' (linear interpolation). + If None (default), raises an error when gaps are detected. **kwargs: Additional arguments passed to xarray.resample() Returns: xr.Dataset: Resampled dataset + + Raises: + ValueError: If resampling creates gaps and fill_gaps is not specified. """ from .flow_system import FlowSystem @@ -473,6 +490,30 @@ def _dataset_resample( time_dataset = dataset[time_var_names] resampled_time_dataset = cls._resample_by_dimension_groups(time_dataset, freq, method, **kwargs) + # Handle NaN values that may arise from resampling irregular timesteps to regular intervals. + # When irregular data (e.g., [00:00, 01:00, 03:00]) is resampled to regular intervals (e.g., '1h'), + # bins without data (e.g., 02:00) get NaN. 
+ if resampled_time_dataset.isnull().any().to_array().any(): + if fill_gaps is None: + # Find which variables have NaN values for a helpful error message + vars_with_nans = [ + name for name in resampled_time_dataset.data_vars if resampled_time_dataset[name].isnull().any() + ] + raise ValueError( + f'Resampling created gaps (NaN values) in variables: {vars_with_nans}. ' + f'This typically happens when resampling irregular timesteps to regular intervals. ' + f"Specify fill_gaps='ffill', 'bfill', or 'interpolate' to handle gaps, " + f'or resample to a coarser frequency.' + ) + elif fill_gaps == 'ffill': + resampled_time_dataset = resampled_time_dataset.ffill(dim='time').bfill(dim='time') + elif fill_gaps == 'bfill': + resampled_time_dataset = resampled_time_dataset.bfill(dim='time').ffill(dim='time') + elif fill_gaps == 'interpolate': + resampled_time_dataset = resampled_time_dataset.interpolate_na(dim='time', method='linear') + # Handle edges that can't be interpolated + resampled_time_dataset = resampled_time_dataset.ffill(dim='time').bfill(dim='time') + if non_time_var_names: non_time_dataset = dataset[non_time_var_names] result = xr.merge([resampled_time_dataset, non_time_dataset]) diff --git a/mkdocs.yml b/mkdocs.yml index f414c1ab4..efaaf5dea 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -52,18 +52,20 @@ nav: - Examples: - Overview: notebooks/index.md - - Getting Started: + - Basics: - Quickstart: notebooks/01-quickstart.ipynb - - Heat System with Storage: notebooks/02-heat-system.ipynb - - Investment & Planning: - - Investment Optimization: notebooks/03-investment-optimization.ipynb - - Operational Constraints: notebooks/04-operational-constraints.ipynb - - Advanced Modeling: - - Multi-Carrier Systems: notebooks/05-multi-carrier-system.ipynb - - Piecewise Efficiency: notebooks/06-piecewise-efficiency.ipynb - - Scenarios & Scaling: - - Scenarios and Periods: notebooks/07-scenarios-and-periods.ipynb - - Large-Scale Optimization: 
notebooks/08-large-scale-optimization.ipynb + - Heat System: notebooks/02-heat-system.ipynb + - Investment: + - Sizing: notebooks/03-investment-optimization.ipynb + - Constraints: notebooks/04-operational-constraints.ipynb + - Advanced: + - Multi-Carrier: notebooks/05-multi-carrier-system.ipynb + - Piecewise: notebooks/06-piecewise-efficiency.ipynb + - Scaling: + - Scenarios: notebooks/07-scenarios-and-periods.ipynb + - Large-Scale: notebooks/08-large-scale-optimization.ipynb + - Results: + - Plotting: notebooks/09-plotting-and-data-access.ipynb - API Reference: api-reference/ diff --git a/tests/conftest.py b/tests/conftest.py index 49734911a..ee2c0f4e2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -213,10 +213,10 @@ def piecewise(): """Piecewise converter from flow_system_piecewise_conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -233,10 +233,10 @@ def segments(timesteps_length): """Segments converter with time-varying piecewise conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -614,12 +614,12 @@ def flow_system_long(): ), fx.linear_converters.CHP( 'BHKW2', - thermal_efficiency=0.58, - electrical_efficiency=0.22, + thermal_efficiency=(eta_th := 0.58), + electrical_efficiency=(eta_el := 0.22), status_parameters=fx.StatusParameters(effects_per_startup=24000), - electrical_flow=fx.Flow('P_el', bus='Strom'), - thermal_flow=fx.Flow('Q_th', 
bus='Fernwärme'), - fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=288, relative_minimum=87 / 288), + fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=(fuel_size := 288), relative_minimum=87 / fuel_size), + electrical_flow=fx.Flow('P_el', bus='Strom', size=fuel_size * eta_el), + thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=fuel_size * eta_th), ), fx.Storage( 'Speicher', diff --git a/tests/deprecated/conftest.py b/tests/deprecated/conftest.py index b6ec5ac4b..65434f04c 100644 --- a/tests/deprecated/conftest.py +++ b/tests/deprecated/conftest.py @@ -216,10 +216,10 @@ def piecewise(): """Piecewise converter from flow_system_piecewise_conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -236,10 +236,10 @@ def segments(timesteps_length): """Segments converter with time-varying piecewise conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -613,12 +613,12 @@ def flow_system_long(): ), fx.linear_converters.CHP( 'BHKW2', - thermal_efficiency=0.58, - electrical_efficiency=0.22, + thermal_efficiency=(eta_th := 0.58), + electrical_efficiency=(eta_el := 0.22), status_parameters=fx.StatusParameters(effects_per_startup=24000), - electrical_flow=fx.Flow('P_el', bus='Strom'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), - fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=288, relative_minimum=87 / 288), + fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=(fuel_size := 288), 
relative_minimum=87 / fuel_size), + electrical_flow=fx.Flow('P_el', bus='Strom', size=fuel_size * eta_el), + thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=fuel_size * eta_th), ), fx.Storage( 'Speicher', diff --git a/tests/deprecated/test_component.py b/tests/deprecated/test_component.py index 8cde784c9..497a5c3aa 100644 --- a/tests/deprecated/test_component.py +++ b/tests/deprecated/test_component.py @@ -31,12 +31,12 @@ def test_component(self, basic_flow_system_linopy_coords, coords_config): """Test that flow model constraints are correctly generated.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config inputs = [ - fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1), - fx.Flow('In2', 'Fernwärme', relative_minimum=np.ones(10) * 0.1), + fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1), + fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1), ] outputs = [ - fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.01), - fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.01), + fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01), + fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01), ] comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs) flow_system.add_elements(comp) @@ -464,7 +464,9 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): boiler = fx.linear_converters.Boiler( 'Boiler_Standard', thermal_efficiency=0.9, - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])), + thermal_flow=fx.Flow( + 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1]) + ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ) @@ -498,7 +500,7 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000), ), out1=fx.Flow('Rohr1b', 'Fernwärme', 
size=1000), - in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters()), + in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)), out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000), balanced=True, ) @@ -541,7 +543,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): boiler = fx.linear_converters.Boiler( 'Boiler_Standard', thermal_efficiency=0.9, - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])), + thermal_flow=fx.Flow( + 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1]) + ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ) @@ -578,7 +582,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): in2=fx.Flow( 'Rohr2a', 'Fernwärme', - size=fx.InvestParameters(effects_of_investment_per_size=100, minimum_size=10, mandatory=True), + size=fx.InvestParameters( + effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True + ), ), out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000), balanced=False, diff --git a/tests/deprecated/test_effect.py b/tests/deprecated/test_effect.py index 10ae59bcc..b3bb278f0 100644 --- a/tests/deprecated/test_effect.py +++ b/tests/deprecated/test_effect.py @@ -253,7 +253,9 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config): thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(effects_of_investment_per_size=10, minimum_size=20, mandatory=True), + size=fx.InvestParameters( + effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True + ), ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ), diff --git a/tests/deprecated/test_flow.py b/tests/deprecated/test_flow.py index 0a1a03341..594bc1fbb 100644 --- a/tests/deprecated/test_flow.py +++ b/tests/deprecated/test_flow.py @@ -593,6 +593,7 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c flow = fx.Flow( 'Wärme', 
bus='Fernwärme', + size=100, status_parameters=fx.StatusParameters( effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour} ), diff --git a/tests/deprecated/test_flow_system_resample.py b/tests/deprecated/test_flow_system_resample.py index 076de4a2e..c76946f80 100644 --- a/tests/deprecated/test_flow_system_resample.py +++ b/tests/deprecated/test_flow_system_resample.py @@ -278,8 +278,8 @@ def test_frequencies(freq, exp_len): assert len(fs.resample(freq, method='mean').timesteps) == exp_len -def test_irregular_timesteps(): - """Test irregular timesteps.""" +def test_irregular_timesteps_error(): + """Test that resampling irregular timesteps to finer resolution raises error without fill_gaps.""" ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) @@ -287,8 +287,26 @@ def test_irregular_timesteps(): fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(3), size=1)]) ) - fs_r = fs.resample('1h', method='mean') - assert len(fs_r.timesteps) > 0 + with pytest.raises(ValueError, match='Resampling created gaps'): + fs.resample('1h', method='mean') + + +def test_irregular_timesteps_with_fill_gaps(): + """Test that resampling irregular timesteps works with explicit fill_gaps strategy.""" + ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink( + label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)] + ) + ) + + # Test with ffill (using deprecated method) + fs_r = fs.resample('1h', method='mean', fill_gaps='ffill') + assert len(fs_r.timesteps) == 4 + # Gap at 02:00 
should be filled with previous value (2.0) + assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, [1.0, 2.0, 2.0, 4.0]) if __name__ == '__main__': diff --git a/tests/deprecated/test_functional.py b/tests/deprecated/test_functional.py index c72754003..409e20a5f 100644 --- a/tests/deprecated/test_functional.py +++ b/tests/deprecated/test_functional.py @@ -186,7 +186,7 @@ def test_optimize_size(solver_fixture, time_steps_fixture): thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1), + size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100), ), ) ) @@ -227,7 +227,9 @@ def test_size_bounds(solver_fixture, time_steps_fixture): thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1), + size=fx.InvestParameters( + minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1 + ), ), ) ) @@ -269,7 +271,11 @@ def test_optional_invest(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=fx.InvestParameters( - mandatory=False, minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1 + mandatory=False, + minimum_size=40, + maximum_size=100, + effects_of_investment=10, + effects_of_investment_per_size=1, ), ), ), @@ -281,7 +287,11 @@ def test_optional_invest(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=fx.InvestParameters( - mandatory=False, minimum_size=50, effects_of_investment=10, effects_of_investment_per_size=1 + mandatory=False, + minimum_size=50, + maximum_size=100, + effects_of_investment=10, + effects_of_investment_per_size=1, ), ), ), diff --git a/tests/deprecated/test_scenarios.py b/tests/deprecated/test_scenarios.py index 366429831..b4a1cd161 100644 --- a/tests/deprecated/test_scenarios.py +++ b/tests/deprecated/test_scenarios.py @@ -212,10 +212,10 
@@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) -> flow_system.add_elements( fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { diff --git a/tests/deprecated/test_storage.py b/tests/deprecated/test_storage.py index a5d2c7a19..15170a321 100644 --- a/tests/deprecated/test_storage.py +++ b/tests/deprecated/test_storage.py @@ -451,6 +451,7 @@ def test_investment_parameters( 'effects_of_investment': 100, 'effects_of_investment_per_size': 10, 'mandatory': mandatory, + 'maximum_size': 100, } if minimum_size is not None: invest_params['minimum_size'] = minimum_size diff --git a/tests/test_component.py b/tests/test_component.py index 741f6390e..66d09aaee 100644 --- a/tests/test_component.py +++ b/tests/test_component.py @@ -31,12 +31,12 @@ def test_component(self, basic_flow_system_linopy_coords, coords_config): """Test that flow model constraints are correctly generated.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config inputs = [ - fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1), - fx.Flow('In2', 'Fernwärme', relative_minimum=np.ones(10) * 0.1), + fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1), + fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1), ] outputs = [ - fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.01), - fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.01), + fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01), + fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01), ] comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs) flow_system.add_elements(comp) @@ -464,7 +464,9 @@ 
def test_transmission_balanced(self, basic_flow_system, highs_solver): boiler = fx.linear_converters.Boiler( 'Boiler_Standard', thermal_efficiency=0.9, - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])), + thermal_flow=fx.Flow( + 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1]) + ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ) @@ -498,7 +500,7 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver): size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000), ), out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000), - in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters()), + in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)), out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000), balanced=True, ) @@ -537,7 +539,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): boiler = fx.linear_converters.Boiler( 'Boiler_Standard', thermal_efficiency=0.9, - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])), + thermal_flow=fx.Flow( + 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1]) + ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ) @@ -574,7 +578,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver): in2=fx.Flow( 'Rohr2a', 'Fernwärme', - size=fx.InvestParameters(effects_of_investment_per_size=100, minimum_size=10, mandatory=True), + size=fx.InvestParameters( + effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True + ), ), out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000), balanced=False, diff --git a/tests/test_effect.py b/tests/test_effect.py index 7dcac9e1c..015e054eb 100644 --- a/tests/test_effect.py +++ b/tests/test_effect.py @@ -251,7 +251,9 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config, highs_solv 
thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(effects_of_investment_per_size=10, minimum_size=20, mandatory=True), + size=fx.InvestParameters( + effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True + ), ), fuel_flow=fx.Flow('Q_fu', bus='Gas'), ), diff --git a/tests/test_flow.py b/tests/test_flow.py index 0a1a03341..594bc1fbb 100644 --- a/tests/test_flow.py +++ b/tests/test_flow.py @@ -593,6 +593,7 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c flow = fx.Flow( 'Wärme', bus='Fernwärme', + size=100, status_parameters=fx.StatusParameters( effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour} ), diff --git a/tests/test_flow_system_resample.py b/tests/test_flow_system_resample.py index 3da206646..7486b173c 100644 --- a/tests/test_flow_system_resample.py +++ b/tests/test_flow_system_resample.py @@ -271,8 +271,8 @@ def test_frequencies(freq, exp_len): assert len(fs.resample(freq, method='mean').timesteps) == exp_len -def test_irregular_timesteps(): - """Test irregular timesteps.""" +def test_irregular_timesteps_error(): + """Test that resampling irregular timesteps to finer resolution raises error without fill_gaps.""" ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) @@ -280,8 +280,26 @@ def test_irregular_timesteps(): fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(3), size=1)]) ) - fs_r = fs.resample('1h', method='mean') - assert len(fs_r.timesteps) > 0 + with pytest.raises(ValueError, match='Resampling created gaps'): + fs.transform.resample('1h', method='mean') + + +def test_irregular_timesteps_with_fill_gaps(): + """Test that resampling irregular timesteps works with explicit fill_gaps strategy.""" + ts = 
pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink( + label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)] + ) + ) + + # Test with ffill + fs_r = fs.transform.resample('1h', method='mean', fill_gaps='ffill') + assert len(fs_r.timesteps) == 4 + # Gap at 02:00 should be filled with previous value (2.0) + assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, [1.0, 2.0, 2.0, 4.0]) if __name__ == '__main__': diff --git a/tests/test_functional.py b/tests/test_functional.py index 8cf67cff9..6d0f8a8fc 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -185,7 +185,7 @@ def test_optimize_size(solver_fixture, time_steps_fixture): thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1), + size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100), ), ) ) @@ -224,7 +224,9 @@ def test_size_bounds(solver_fixture, time_steps_fixture): thermal_flow=fx.Flow( 'Q_th', bus='Fernwärme', - size=fx.InvestParameters(minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1), + size=fx.InvestParameters( + minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1 + ), ), ) ) @@ -264,7 +266,11 @@ def test_optional_invest(solver_fixture, time_steps_fixture): 'Q_th', bus='Fernwärme', size=fx.InvestParameters( - mandatory=False, minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1 + mandatory=False, + minimum_size=40, + maximum_size=100, + effects_of_investment=10, + effects_of_investment_per_size=1, ), ), ), @@ -276,7 +282,11 @@ def test_optional_invest(solver_fixture, time_steps_fixture): 'Q_th', 
bus='Fernwärme', size=fx.InvestParameters( - mandatory=False, minimum_size=50, effects_of_investment=10, effects_of_investment_per_size=1 + mandatory=False, + minimum_size=50, + maximum_size=100, + effects_of_investment=10, + effects_of_investment_per_size=1, ), ), ), diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index 366429831..b4a1cd161 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -212,10 +212,10 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) -> flow_system.add_elements( fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas')], + inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], outputs=[ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme'), + fx.Flow('Q_th', bus='Fernwärme', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { diff --git a/tests/test_storage.py b/tests/test_storage.py index a5d2c7a19..15170a321 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -451,6 +451,7 @@ def test_investment_parameters( 'effects_of_investment': 100, 'effects_of_investment_per_size': 10, 'mandatory': mandatory, + 'maximum_size': 100, } if minimum_size is not None: invest_params['minimum_size'] = minimum_size From 2c2ace1391a75f6892ec5bdda74276910cba370e Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 17:28:17 +0100 Subject: [PATCH 37/49] Better notebooks and units in plots (#527) * Change default size to None. Raise if not set and Status is used * Make maximum size mandatory (or fixed size). Update tests accordingly * Adding maximum sizes for FLows which get Status variables (also if only through component status * Update more tests * BUGFIX: Minimum or foixed size in storages * Added validation: 5. 
Component with status_parameters now validates that all flows have explicit sizes * In flixopt/elements.py - Flow._plausibility_checks(): - relative_minimum > 0 requires size (lines 585-590) - relative_maximum < 1 requires size (lines 592-596) - load_factor_min requires size (lines 598-603) - load_factor_max requires size (lines 605-609) * Added more validations * Fix test * Fix tests to specify size if needed * Improve check verbosity * Fix type hint * Improve conftest.py * Added more notebooks * Improve notebooks * Improve plotting of Piecewises * Improve notebooks * Streamline notebook * Remove old notebook * fix notebook * fix notebook script * Extract PlotResults class into separate module and reuse * Improve notebook * Add carrier units and effect units to accessors * Add carrier units and effect units to notebooks * Add carrier units and effect units to notebooks * Decrease notebook font size * Fix tests * Fix docs --- docs/notebooks/02-heat-system.ipynb | 41 +- .../03-investment-optimization.ipynb | 63 +- .../04-operational-constraints.ipynb | 4825 +------- docs/notebooks/05-multi-carrier-system.ipynb | 183 +- docs/notebooks/06-piecewise-efficiency.ipynb | 633 -- .../06a-time-varying-parameters.ipynb | 339 + docs/notebooks/06b-piecewise-conversion.ipynb | 4371 ++++++++ docs/notebooks/06c-piecewise-effects.ipynb | 4544 ++++++++ docs/notebooks/07-scenarios-and-periods.ipynb | 11 +- .../08-large-scale-optimization.ipynb | 87 +- .../09-plotting-and-data-access.ipynb | 9827 ++++++++--------- docs/notebooks/10-transmission.ipynb | 418 + .../data/generate_example_systems.py | 14 +- docs/notebooks/index.md | 15 +- docs/stylesheets/extra.css | 6 +- .../building-models/choosing-components.md | 381 + docs/user-guide/building-models/index.md | 384 +- docs/user-guide/results-plotting.md | 2 +- flixopt/__init__.py | 2 + flixopt/clustering.py | 43 +- flixopt/interface.py | 235 +- flixopt/optimization.py | 3 +- flixopt/plot_result.py | 143 + flixopt/statistics_accessor.py 
| 146 +- flixopt/topology_accessor.py | 58 +- mkdocs.yml | 10 +- tests/test_solution_and_plotting.py | 17 +- tests/test_topology_accessor.py | 27 +- 28 files changed, 15555 insertions(+), 11273 deletions(-) delete mode 100644 docs/notebooks/06-piecewise-efficiency.ipynb create mode 100644 docs/notebooks/06a-time-varying-parameters.ipynb create mode 100644 docs/notebooks/06b-piecewise-conversion.ipynb create mode 100644 docs/notebooks/06c-piecewise-effects.ipynb create mode 100644 docs/notebooks/10-transmission.ipynb create mode 100644 docs/user-guide/building-models/choosing-components.md create mode 100644 flixopt/plot_result.py diff --git a/docs/notebooks/02-heat-system.ipynb b/docs/notebooks/02-heat-system.ipynb index 5028065fd..f1392c72f 100644 --- a/docs/notebooks/02-heat-system.ipynb +++ b/docs/notebooks/02-heat-system.ipynb @@ -149,46 +149,7 @@ "id": "9", "metadata": {}, "outputs": [], - "source": [ - "flow_system = fx.FlowSystem(timesteps)\n", - "\n", - "flow_system.add_elements(\n", - " # === Buses ===\n", - " fx.Bus('Gas', carrier='gas'),\n", - " fx.Bus('Heat', carrier='heat'),\n", - " # === Effect ===\n", - " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n", - " # === Gas Supply with time-varying price ===\n", - " fx.Source(\n", - " 'GasGrid',\n", - " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n", - " ),\n", - " # === Gas Boiler: 150 kW, 92% efficiency ===\n", - " fx.linear_converters.Boiler(\n", - " 'Boiler',\n", - " thermal_efficiency=0.92,\n", - " thermal_flow=fx.Flow('Heat', bus='Heat', size=150),\n", - " fuel_flow=fx.Flow('Gas', bus='Gas'),\n", - " ),\n", - " # === Thermal Storage: 500 kWh tank ===\n", - " fx.Storage(\n", - " 'ThermalStorage',\n", - " capacity_in_flow_hours=500, # 500 kWh capacity\n", - " initial_charge_state=250, # Start half-full\n", - " minimal_final_charge_state=200, # End with at least 200 kWh\n", - " eta_charge=0.98, # 98% charging efficiency\n", - " 
eta_discharge=0.98, # 98% discharging efficiency\n", - " relative_loss_per_hour=0.005, # 0.5% heat loss per hour\n", - " charging=fx.Flow('Charge', bus='Heat', size=100), # Max 100 kW charging\n", - " discharging=fx.Flow('Discharge', bus='Heat', size=100), # Max 100 kW discharging\n", - " ),\n", - " # === Office Heat Demand ===\n", - " fx.Sink(\n", - " 'Office',\n", - " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n", - " ),\n", - ")" - ] + "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Heat', carrier='heat'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === Gas Supply with time-varying price ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n ),\n # === Gas Boiler: 150 kW, 92% efficiency ===\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Heat', bus='Heat', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Thermal Storage: 500 kWh tank ===\n fx.Storage(\n 'ThermalStorage',\n capacity_in_flow_hours=500, # 500 kWh capacity\n initial_charge_state=250, # Start half-full\n minimal_final_charge_state=200, # End with at least 200 kWh\n eta_charge=0.98, # 98% charging efficiency\n eta_discharge=0.98, # 98% discharging efficiency\n relative_loss_per_hour=0.005, # 0.5% heat loss per hour\n charging=fx.Flow('Charge', bus='Heat', size=100), # Max 100 kW charging\n discharging=fx.Flow('Discharge', bus='Heat', size=100), # Max 100 kW discharging\n ),\n # === Office Heat Demand ===\n fx.Sink(\n 'Office',\n inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n ),\n)" }, { "cell_type": "markdown", diff --git 
a/docs/notebooks/03-investment-optimization.ipynb b/docs/notebooks/03-investment-optimization.ipynb index 478f93798..ff62fe037 100644 --- a/docs/notebooks/03-investment-optimization.ipynb +++ b/docs/notebooks/03-investment-optimization.ipynb @@ -171,68 +171,7 @@ "id": "10", "metadata": {}, "outputs": [], - "source": [ - "flow_system = fx.FlowSystem(timesteps)\n", - "\n", - "flow_system.add_elements(\n", - " # === Buses ===\n", - " fx.Bus('Heat', carrier='heat'),\n", - " fx.Bus('Gas', carrier='gas'),\n", - " # === Effects ===\n", - " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n", - " # === Gas Supply ===\n", - " fx.Source(\n", - " 'GasGrid',\n", - " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n", - " ),\n", - " # === Gas Boiler (existing, fixed size) ===\n", - " fx.linear_converters.Boiler(\n", - " 'GasBoiler',\n", - " thermal_efficiency=0.92,\n", - " thermal_flow=fx.Flow('Heat', bus='Heat', size=200), # 200 kW existing\n", - " fuel_flow=fx.Flow('Gas', bus='Gas'),\n", - " ),\n", - " # === Solar Collectors (size to be optimized) ===\n", - " fx.Source(\n", - " 'SolarCollectors',\n", - " outputs=[\n", - " fx.Flow(\n", - " 'Heat',\n", - " bus='Heat',\n", - " # Investment optimization: find optimal size between 0-500 kW\n", - " size=fx.InvestParameters(\n", - " minimum_size=0,\n", - " maximum_size=500,\n", - " effects_of_investment_per_size={'costs': SOLAR_COST_WEEKLY},\n", - " ),\n", - " # Solar output depends on radiation profile\n", - " fixed_relative_profile=solar_profile,\n", - " )\n", - " ],\n", - " ),\n", - " # === Buffer Tank (size to be optimized) ===\n", - " fx.Storage(\n", - " 'BufferTank',\n", - " # Investment optimization: find optimal capacity between 0-2000 kWh\n", - " capacity_in_flow_hours=fx.InvestParameters(\n", - " minimum_size=0,\n", - " maximum_size=2000,\n", - " effects_of_investment_per_size={'costs': TANK_COST_WEEKLY},\n", - " ),\n", - " initial_charge_state=0,\n", - " 
eta_charge=0.95,\n", - " eta_discharge=0.95,\n", - " relative_loss_per_hour=0.01, # 1% loss per hour\n", - " charging=fx.Flow('Charge', bus='Heat', size=200),\n", - " discharging=fx.Flow('Discharge', bus='Heat', size=200),\n", - " ),\n", - " # === Pool Heat Demand ===\n", - " fx.Sink(\n", - " 'Pool',\n", - " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n", - " ),\n", - ")" - ] + "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Heat', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n # === Effects ===\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n ),\n # === Gas Boiler (existing, fixed size) ===\n fx.linear_converters.Boiler(\n 'GasBoiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Heat', bus='Heat', size=200), # 200 kW existing\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Solar Collectors (size to be optimized) ===\n fx.Source(\n 'SolarCollectors',\n outputs=[\n fx.Flow(\n 'Heat',\n bus='Heat',\n # Investment optimization: find optimal size between 0-500 kW\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=500,\n effects_of_investment_per_size={'costs': SOLAR_COST_WEEKLY},\n ),\n # Solar output depends on radiation profile\n fixed_relative_profile=solar_profile,\n )\n ],\n ),\n # === Buffer Tank (size to be optimized) ===\n fx.Storage(\n 'BufferTank',\n # Investment optimization: find optimal capacity between 0-2000 kWh\n capacity_in_flow_hours=fx.InvestParameters(\n minimum_size=0,\n maximum_size=2000,\n effects_of_investment_per_size={'costs': TANK_COST_WEEKLY},\n ),\n initial_charge_state=0,\n eta_charge=0.95,\n eta_discharge=0.95,\n relative_loss_per_hour=0.01, # 1% loss per 
hour\n charging=fx.Flow('Charge', bus='Heat', size=200),\n discharging=fx.Flow('Discharge', bus='Heat', size=200),\n ),\n # === Pool Heat Demand ===\n fx.Sink(\n 'Pool',\n inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n ),\n)" }, { "cell_type": "markdown", diff --git a/docs/notebooks/04-operational-constraints.ipynb b/docs/notebooks/04-operational-constraints.ipynb index d204b8809..9090b172b 100644 --- a/docs/notebooks/04-operational-constraints.ipynb +++ b/docs/notebooks/04-operational-constraints.ipynb @@ -1,36 +1,48 @@ { "cells": [ { - "cell_type": "markdown", - "id": "0", "metadata": {}, - "source": "# Constraints\n\nIndustrial boiler with startup costs, minimum uptime, and load constraints.\n\nThis notebook introduces:\n\n- **StatusParameters**: Model on/off decisions with constraints\n- **Startup costs**: Penalties for turning equipment on\n- **Minimum uptime/downtime**: Prevent rapid cycling\n- **Minimum load**: Equipment can't run below a certain output" + "cell_type": "markdown", + "source": [ + "# Constraints\n", + "\n", + "Industrial boiler with startup costs, minimum uptime, and load constraints.\n", + "\n", + "This notebook introduces:\n", + "\n", + "- **StatusParameters**: Model on/off decisions with constraints\n", + "- **Startup costs**: Penalties for turning equipment on\n", + "- **Minimum uptime/downtime**: Prevent rapid cycling\n", + "- **Minimum load**: Equipment can't run below a certain output" + ], + "id": "217ee38bd32426e5" }, { - "cell_type": "markdown", - "id": "1", "metadata": {}, - "source": [ - "## Setup" - ] + "cell_type": "markdown", + "source": "## Setup", + "id": "73f6d18d567c6329" }, { + "metadata": {}, "cell_type": "code", - "id": "2", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:33.779260Z", - "start_time": "2025-12-12T10:09:29.075288Z" - } - }, - "source": "import numpy as np\nimport pandas as pd\nimport plotly.express as px\nimport xarray as xr\n\nimport flixopt as 
fx\n\nfx.CONFIG.notebook()", "outputs": [], - "execution_count": null + "execution_count": null, + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import plotly.express as px\n", + "import xarray as xr\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ], + "id": "e8a50bb05c1400f2" }, { - "cell_type": "markdown", - "id": "3", "metadata": {}, + "cell_type": "markdown", "source": [ "## System Description\n", "\n", @@ -41,25 +53,20 @@ "- **Steam demand**: Varies with production schedule (high during shifts, low overnight)\n", "\n", "The main boiler is more efficient but has operational constraints. The backup is less efficient but flexible." - ] + ], + "id": "54d9decc2ccf8235" }, { - "cell_type": "markdown", - "id": "4", "metadata": {}, - "source": [ - "## Define Time Horizon and Demand" - ] + "cell_type": "markdown", + "source": "## Define Time Horizon and Demand", + "id": "65694ad43e7a1f42" }, { + "metadata": {}, "cell_type": "code", - "id": "5", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:33.901073Z", - "start_time": "2025-12-12T10:09:33.889283Z" - } - }, + "outputs": [], + "execution_count": null, "source": [ "# 3 days, hourly resolution\n", "timesteps = pd.date_range('2024-03-11', periods=72, freq='h')\n", @@ -88,4271 +95,119 @@ "print(f'Peak demand: {steam_demand.max():.0f} kW')\n", "print(f'Min demand: {steam_demand.min():.0f} kW')" ], - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Peak demand: 435 kW\n", - "Min demand: 50 kW\n" - ] - } - ], - "execution_count": 2 + "id": "8c606ee48c294628" }, { + "metadata": {}, "cell_type": "code", - "id": "6", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:34.074761Z", - "start_time": "2025-12-12T10:09:33.923626Z" - } - }, - "source": [ - "px.line(x=timesteps, y=steam_demand, title='Factory Steam Demand', labels={'x': 'Time', 'y': 'kW'})" - ], - "outputs": [ - { - "data": { - "text/html": [ - " \n", - " 
\n", - " " - ] - }, - "metadata": {}, - "output_type": "display_data", - "jetTransient": { - "display_id": null - } - }, - { - "data": { - "text/html": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data", - "jetTransient": { - "display_id": null - } - } - ], - "execution_count": 3 + "outputs": [], + "execution_count": null, + "source": "px.line(x=timesteps, y=steam_demand, title='Factory Steam Demand', labels={'x': 'Time', 'y': 'kW'})", + "id": "fd4f46fa717b1572" }, { - "cell_type": "markdown", - "id": "7", "metadata": {}, - "source": [ - "## Build System with Operational Constraints" - ] + "cell_type": "markdown", + "source": "## Build System with Operational Constraints", + "id": "2d823131e625dcfa" }, { + "metadata": {}, "cell_type": "code", - "id": "8", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:38.270762Z", - "start_time": "2025-12-12T10:09:38.098962Z" - } - }, - "source": "flow_system = fx.FlowSystem(timesteps)\n\n# Define and register a custom carrier for process steam\nsteam_carrier = fx.Carrier('steam', color='#87CEEB', unit='kW_th', description='Process steam')\nflow_system.add_carriers(steam_carrier)\n\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Steam', carrier='steam'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n ),\n # === Main Industrial Boiler (with operational constraints) ===\n fx.linear_converters.Boiler(\n 'MainBoiler',\n thermal_efficiency=0.94, # High efficiency\n # StatusParameters define on/off behavior\n status_parameters=fx.StatusParameters(\n effects_per_startup={'costs': 50}, # 50€ startup cost\n min_uptime=4, # Must run at least 4 hours once started\n min_downtime=2, # Must stay off at least 2 hours\n ),\n thermal_flow=fx.Flow(\n 'Steam',\n bus='Steam',\n size=500,\n relative_minimum=0.3, # Minimum load: 30% = 150 kW\n ),\n fuel_flow=fx.Flow('Gas', bus='Gas', size=550), # size required for big-M constraints\n ),\n # === Backup 
Boiler (flexible, but less efficient) ===\n fx.linear_converters.Boiler(\n 'BackupBoiler',\n thermal_efficiency=0.85, # Lower efficiency\n # No status parameters = can turn on/off freely\n thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Factory Steam Demand ===\n fx.Sink(\n 'Factory',\n inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n ),\n)", "outputs": [], - "execution_count": null + "execution_count": null, + "source": [ + "flow_system = fx.FlowSystem(timesteps)\n", + "\n", + "# Define and register custom carriers\n", + "flow_system.add_carriers(\n", + " fx.Carrier('gas', '#3498db', 'kW'),\n", + " fx.Carrier('steam', '#87CEEB', 'kW_th', 'Process steam'),\n", + ")\n", + "\n", + "flow_system.add_elements(\n", + " # === Buses ===\n", + " fx.Bus('Gas', carrier='gas'),\n", + " fx.Bus('Steam', carrier='steam'),\n", + " # === Effect ===\n", + " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n", + " # === Gas Supply ===\n", + " fx.Source(\n", + " 'GasGrid',\n", + " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n", + " ),\n", + " # === Main Industrial Boiler (with operational constraints) ===\n", + " fx.linear_converters.Boiler(\n", + " 'MainBoiler',\n", + " thermal_efficiency=0.94, # High efficiency\n", + " # StatusParameters define on/off behavior\n", + " status_parameters=fx.StatusParameters(\n", + " effects_per_startup={'costs': 50}, # 50€ startup cost\n", + " min_uptime=4, # Must run at least 4 hours once started\n", + " min_downtime=2, # Must stay off at least 2 hours\n", + " ),\n", + " thermal_flow=fx.Flow(\n", + " 'Steam',\n", + " bus='Steam',\n", + " size=500,\n", + " relative_minimum=0.3, # Minimum load: 30% = 150 kW\n", + " ),\n", + " fuel_flow=fx.Flow('Gas', bus='Gas', size=600), # Size required for status_parameters\n", + " ),\n", + " # === Backup Boiler (flexible, but less efficient) ===\n", + " 
fx.linear_converters.Boiler(\n", + " 'BackupBoiler',\n", + " thermal_efficiency=0.85, # Lower efficiency\n", + " # No status parameters = can turn on/off freely\n", + " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n", + " fuel_flow=fx.Flow('Gas', bus='Gas'),\n", + " ),\n", + " # === Factory Steam Demand ===\n", + " fx.Sink(\n", + " 'Factory',\n", + " inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n", + " ),\n", + ")" + ], + "id": "736dfa9a935f6c7e" }, { - "cell_type": "markdown", - "id": "9", "metadata": {}, - "source": [ - "## Run Optimization" - ] + "cell_type": "markdown", + "source": "## Run Optimization", + "id": "70ae8aaa82997d51" }, { + "metadata": {}, "cell_type": "code", - "id": "10", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:40.554120Z", - "start_time": "2025-12-12T10:09:38.362282Z" - } - }, - "source": [ - "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));" - ], - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Running HiGHS 1.12.0 (git hash: 755a8e0): Copyright (c) 2025 HiGHS under MIT licence terms\n", - "MIP linopy-problem-6hzam9p_ has 1814 rows; 1311 cols; 5121 nonzeros; 432 integer variables (432 binary)\n", - "Coefficient ranges:\n", - " Matrix [1e-05, 1e+07]\n", - " Cost [1e+00, 1e+00]\n", - " Bound [1e+00, 1e+07]\n", - " RHS [1e-05, 7e+01]\n", - "WARNING: Problem has some excessively large column bounds\n", - "WARNING: Problem has some excessively small row bounds\n", - "WARNING: Consider scaling the bounds by 1e-1, or setting the user_bound_scale option to -4\n", - "Presolving model\n", - "1212 rows, 645 cols, 2987 nonzeros 0s\n", - "0 rows, 0 cols, 0 nonzeros 0s\n", - "Presolve reductions: rows 0(-1814); columns 0(-1311); nonzeros 0(-5121) - Reduced to empty\n", - "Presolve: Optimal\n", - "\n", - "Src: B => Branching; C => Central rounding; F => Feasibility pump; H => Heuristic;\n", - " I => Shifting; J => Feasibility jump; L => 
Sub-MIP; P => Empty MIP; R => Randomized rounding;\n", - " S => Solve LP; T => Evaluate node; U => Unbounded; X => User solution; Y => HiGHS solution;\n", - " Z => ZI Round; l => Trivial lower; p => Trivial point; u => Trivial upper; z => Trivial zero\n", - "\n", - " Nodes | B&B Tree | Objective Bounds | Dynamic Constraints | Work \n", - "Src Proc. InQueue | Leaves Expl. | BestBound BestSol Gap | Cuts InLp Confl. | LpIters Time\n", - "\n", - " 0 0 0 0.00% 1441.320174 1441.320174 0.00% 0 0 0 0 0.0s\n", - "\n", - "Solving report\n", - " Model linopy-problem-6hzam9p_\n", - " Status Optimal\n", - " Primal bound 1441.3201735\n", - " Dual bound 1441.3201735\n", - " Gap 0% (tolerance: 1%)\n", - " P-D integral 0\n", - " Solution status feasible\n", - " 1441.3201735 (objective)\n", - " 0 (bound viol.)\n", - " 0 (int. viol.)\n", - " 0 (row viol.)\n", - " Timing 0.01\n", - " Max sub-MIP depth 0\n", - " Nodes 0\n", - " Repair LPs 0\n", - " LP iterations 0\n" - ] - } - ], - "execution_count": 5 + "outputs": [], + "execution_count": null, + "source": "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));", + "id": "76f27e3afe64f8c5" }, { - "cell_type": "markdown", - "id": "11", "metadata": {}, + "cell_type": "markdown", "source": [ "## Analyze Results\n", "\n", "### Steam Balance\n", "\n", "See how the two boilers share the load:" - ] + ], + "id": "c42e2778fd0a8ca" }, { + "metadata": {}, "cell_type": "code", - "id": "12", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:40.792746Z", - "start_time": "2025-12-12T10:09:40.580142Z" - } - }, - "source": [ - "flow_system.statistics.plot.balance('Steam')" - ], - "outputs": [ - { - "data": { - "text/plain": [ - "PlotResult(data= Size: 2kB\n", - "Dimensions: (time: 73)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 584B 2024-03-11 ... 2024-03-14\n", - "Data variables:\n", - " MainBoiler(Steam) (time) float64 584B -0.0 -0.0 -0.0 ... 
-0.0 -0.0 nan\n", - " BackupBoiler(Steam) (time) float64 584B -58.29 -99.95 -85.66 ... -63.38 nan\n", - " Factory(Steam) (time) float64 584B 58.29 99.95 85.66 ... 63.38 nan, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=MainBoiler(Steam)
time=%{x}
value=%{y}',\n", - " 'legendgroup': 'MainBoiler(Steam)',\n", - " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'MainBoiler(Steam)',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['2024-03-11T00:00:00.000000000', '2024-03-11T01:00:00.000000000',\n", - " '2024-03-11T02:00:00.000000000', '2024-03-11T03:00:00.000000000',\n", - " '2024-03-11T04:00:00.000000000', '2024-03-11T05:00:00.000000000',\n", - " '2024-03-11T06:00:00.000000000', '2024-03-11T07:00:00.000000000',\n", - " '2024-03-11T08:00:00.000000000', '2024-03-11T09:00:00.000000000',\n", - " '2024-03-11T10:00:00.000000000', '2024-03-11T11:00:00.000000000',\n", - " '2024-03-11T12:00:00.000000000', '2024-03-11T13:00:00.000000000',\n", - " '2024-03-11T14:00:00.000000000', '2024-03-11T15:00:00.000000000',\n", - " '2024-03-11T16:00:00.000000000', '2024-03-11T17:00:00.000000000',\n", - " '2024-03-11T18:00:00.000000000', '2024-03-11T19:00:00.000000000',\n", - " '2024-03-11T20:00:00.000000000', '2024-03-11T21:00:00.000000000',\n", - " '2024-03-11T22:00:00.000000000', '2024-03-11T23:00:00.000000000',\n", - " '2024-03-12T00:00:00.000000000', '2024-03-12T01:00:00.000000000',\n", - " '2024-03-12T02:00:00.000000000', '2024-03-12T03:00:00.000000000',\n", - " '2024-03-12T04:00:00.000000000', '2024-03-12T05:00:00.000000000',\n", - " '2024-03-12T06:00:00.000000000', '2024-03-12T07:00:00.000000000',\n", - " '2024-03-12T08:00:00.000000000', '2024-03-12T09:00:00.000000000',\n", - " '2024-03-12T10:00:00.000000000', '2024-03-12T11:00:00.000000000',\n", - " '2024-03-12T12:00:00.000000000', '2024-03-12T13:00:00.000000000',\n", - " '2024-03-12T14:00:00.000000000', '2024-03-12T15:00:00.000000000',\n", - " '2024-03-12T16:00:00.000000000', '2024-03-12T17:00:00.000000000',\n", - " '2024-03-12T18:00:00.000000000', '2024-03-12T19:00:00.000000000',\n", - " '2024-03-12T20:00:00.000000000', 
'2024-03-12T21:00:00.000000000',\n", - " '2024-03-12T22:00:00.000000000', '2024-03-12T23:00:00.000000000',\n", - " '2024-03-13T00:00:00.000000000', '2024-03-13T01:00:00.000000000',\n", - " '2024-03-13T02:00:00.000000000', '2024-03-13T03:00:00.000000000',\n", - " '2024-03-13T04:00:00.000000000', '2024-03-13T05:00:00.000000000',\n", - " '2024-03-13T06:00:00.000000000', '2024-03-13T07:00:00.000000000',\n", - " '2024-03-13T08:00:00.000000000', '2024-03-13T09:00:00.000000000',\n", - " '2024-03-13T10:00:00.000000000', '2024-03-13T11:00:00.000000000',\n", - " '2024-03-13T12:00:00.000000000', '2024-03-13T13:00:00.000000000',\n", - " '2024-03-13T14:00:00.000000000', '2024-03-13T15:00:00.000000000',\n", - " '2024-03-13T16:00:00.000000000', '2024-03-13T17:00:00.000000000',\n", - " '2024-03-13T18:00:00.000000000', '2024-03-13T19:00:00.000000000',\n", - " '2024-03-13T20:00:00.000000000', '2024-03-13T21:00:00.000000000',\n", - " '2024-03-13T22:00:00.000000000', '2024-03-13T23:00:00.000000000',\n", - " '2024-03-14T00:00:00.000000000'], dtype='datetime64[ns]'),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'AAAAAAgAAAAAAAAACAAAAAAAAA+P8='),\n", - " 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'variable=BackupBoiler(Steam)
time=%{x}
value=%{y}',\n", - " 'legendgroup': 'BackupBoiler(Steam)',\n", - " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'BackupBoiler(Steam)',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['2024-03-11T00:00:00.000000000', '2024-03-11T01:00:00.000000000',\n", - " '2024-03-11T02:00:00.000000000', '2024-03-11T03:00:00.000000000',\n", - " '2024-03-11T04:00:00.000000000', '2024-03-11T05:00:00.000000000',\n", - " '2024-03-11T06:00:00.000000000', '2024-03-11T07:00:00.000000000',\n", - " '2024-03-11T08:00:00.000000000', '2024-03-11T09:00:00.000000000',\n", - " '2024-03-11T10:00:00.000000000', '2024-03-11T11:00:00.000000000',\n", - " '2024-03-11T12:00:00.000000000', '2024-03-11T13:00:00.000000000',\n", - " '2024-03-11T14:00:00.000000000', '2024-03-11T15:00:00.000000000',\n", - " '2024-03-11T16:00:00.000000000', '2024-03-11T17:00:00.000000000',\n", - " '2024-03-11T18:00:00.000000000', '2024-03-11T19:00:00.000000000',\n", - " '2024-03-11T20:00:00.000000000', '2024-03-11T21:00:00.000000000',\n", - " '2024-03-11T22:00:00.000000000', '2024-03-11T23:00:00.000000000',\n", - " '2024-03-12T00:00:00.000000000', '2024-03-12T01:00:00.000000000',\n", - " '2024-03-12T02:00:00.000000000', '2024-03-12T03:00:00.000000000',\n", - " '2024-03-12T04:00:00.000000000', '2024-03-12T05:00:00.000000000',\n", - " '2024-03-12T06:00:00.000000000', '2024-03-12T07:00:00.000000000',\n", - " '2024-03-12T08:00:00.000000000', '2024-03-12T09:00:00.000000000',\n", - " '2024-03-12T10:00:00.000000000', '2024-03-12T11:00:00.000000000',\n", - " '2024-03-12T12:00:00.000000000', '2024-03-12T13:00:00.000000000',\n", - " '2024-03-12T14:00:00.000000000', '2024-03-12T15:00:00.000000000',\n", - " '2024-03-12T16:00:00.000000000', '2024-03-12T17:00:00.000000000',\n", - " '2024-03-12T18:00:00.000000000', '2024-03-12T19:00:00.000000000',\n", - " '2024-03-12T20:00:00.000000000', 
'2024-03-12T21:00:00.000000000',\n", - " '2024-03-12T22:00:00.000000000', '2024-03-12T23:00:00.000000000',\n", - " '2024-03-13T00:00:00.000000000', '2024-03-13T01:00:00.000000000',\n", - " '2024-03-13T02:00:00.000000000', '2024-03-13T03:00:00.000000000',\n", - " '2024-03-13T04:00:00.000000000', '2024-03-13T05:00:00.000000000',\n", - " '2024-03-13T06:00:00.000000000', '2024-03-13T07:00:00.000000000',\n", - " '2024-03-13T08:00:00.000000000', '2024-03-13T09:00:00.000000000',\n", - " '2024-03-13T10:00:00.000000000', '2024-03-13T11:00:00.000000000',\n", - " '2024-03-13T12:00:00.000000000', '2024-03-13T13:00:00.000000000',\n", - " '2024-03-13T14:00:00.000000000', '2024-03-13T15:00:00.000000000',\n", - " '2024-03-13T16:00:00.000000000', '2024-03-13T17:00:00.000000000',\n", - " '2024-03-13T18:00:00.000000000', '2024-03-13T19:00:00.000000000',\n", - " '2024-03-13T20:00:00.000000000', '2024-03-13T21:00:00.000000000',\n", - " '2024-03-13T22:00:00.000000000', '2024-03-13T23:00:00.000000000',\n", - " '2024-03-14T00:00:00.000000000'], dtype='datetime64[ns]'),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': ('mN+4IMkkTcCAdOsnmvxYwMQF7WQ2al' ... 'IhmldWwF2t/0Q+sE/AAAAAAAAA+P8='),\n", - " 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'variable=Factory(Steam)
time=%{x}
value=%{y}',\n", - " 'legendgroup': 'Factory(Steam)',\n", - " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'Factory(Steam)',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['2024-03-11T00:00:00.000000000', '2024-03-11T01:00:00.000000000',\n", - " '2024-03-11T02:00:00.000000000', '2024-03-11T03:00:00.000000000',\n", - " '2024-03-11T04:00:00.000000000', '2024-03-11T05:00:00.000000000',\n", - " '2024-03-11T06:00:00.000000000', '2024-03-11T07:00:00.000000000',\n", - " '2024-03-11T08:00:00.000000000', '2024-03-11T09:00:00.000000000',\n", - " '2024-03-11T10:00:00.000000000', '2024-03-11T11:00:00.000000000',\n", - " '2024-03-11T12:00:00.000000000', '2024-03-11T13:00:00.000000000',\n", - " '2024-03-11T14:00:00.000000000', '2024-03-11T15:00:00.000000000',\n", - " '2024-03-11T16:00:00.000000000', '2024-03-11T17:00:00.000000000',\n", - " '2024-03-11T18:00:00.000000000', '2024-03-11T19:00:00.000000000',\n", - " '2024-03-11T20:00:00.000000000', '2024-03-11T21:00:00.000000000',\n", - " '2024-03-11T22:00:00.000000000', '2024-03-11T23:00:00.000000000',\n", - " '2024-03-12T00:00:00.000000000', '2024-03-12T01:00:00.000000000',\n", - " '2024-03-12T02:00:00.000000000', '2024-03-12T03:00:00.000000000',\n", - " '2024-03-12T04:00:00.000000000', '2024-03-12T05:00:00.000000000',\n", - " '2024-03-12T06:00:00.000000000', '2024-03-12T07:00:00.000000000',\n", - " '2024-03-12T08:00:00.000000000', '2024-03-12T09:00:00.000000000',\n", - " '2024-03-12T10:00:00.000000000', '2024-03-12T11:00:00.000000000',\n", - " '2024-03-12T12:00:00.000000000', '2024-03-12T13:00:00.000000000',\n", - " '2024-03-12T14:00:00.000000000', '2024-03-12T15:00:00.000000000',\n", - " '2024-03-12T16:00:00.000000000', '2024-03-12T17:00:00.000000000',\n", - " '2024-03-12T18:00:00.000000000', '2024-03-12T19:00:00.000000000',\n", - " '2024-03-12T20:00:00.000000000', 
'2024-03-12T21:00:00.000000000',\n", - " '2024-03-12T22:00:00.000000000', '2024-03-12T23:00:00.000000000',\n", - " '2024-03-13T00:00:00.000000000', '2024-03-13T01:00:00.000000000',\n", - " '2024-03-13T02:00:00.000000000', '2024-03-13T03:00:00.000000000',\n", - " '2024-03-13T04:00:00.000000000', '2024-03-13T05:00:00.000000000',\n", - " '2024-03-13T06:00:00.000000000', '2024-03-13T07:00:00.000000000',\n", - " '2024-03-13T08:00:00.000000000', '2024-03-13T09:00:00.000000000',\n", - " '2024-03-13T10:00:00.000000000', '2024-03-13T11:00:00.000000000',\n", - " '2024-03-13T12:00:00.000000000', '2024-03-13T13:00:00.000000000',\n", - " '2024-03-13T14:00:00.000000000', '2024-03-13T15:00:00.000000000',\n", - " '2024-03-13T16:00:00.000000000', '2024-03-13T17:00:00.000000000',\n", - " '2024-03-13T18:00:00.000000000', '2024-03-13T19:00:00.000000000',\n", - " '2024-03-13T20:00:00.000000000', '2024-03-13T21:00:00.000000000',\n", - " '2024-03-13T22:00:00.000000000', '2024-03-13T23:00:00.000000000',\n", - " '2024-03-14T00:00:00.000000000'], dtype='datetime64[ns]'),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': ('l9+4IMkkTUCAdOsnmvxYQMQF7WQ2al' ... 'IhmldWQF6t/0Q+sE9AAAAAAAAA+H8='),\n", - " 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'bargap': 0,\n", - " 'bargroupgap': 0,\n", - " 'barmode': 'relative',\n", - " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'Steam (flow_rate)'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" - ], - "text/html": [ - "
\n", - "
" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "execution_count": 6 + "outputs": [], + "execution_count": null, + "source": "flow_system.statistics.plot.balance('Steam')", + "id": "9da80bc8faca05cd" }, { - "cell_type": "markdown", - "id": "13", "metadata": {}, + "cell_type": "markdown", "source": [ "### Main Boiler Operation\n", "\n", @@ -4360,97 +215,32 @@ "- Runs continuously during production (respecting min uptime)\n", "- Stays above minimum load (30%)\n", "- Shuts down during low-demand periods" - ] + ], + "id": "c885d25675d71371" }, { + "metadata": {}, "cell_type": "code", - "id": "14", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:41.061182Z", - "start_time": "2025-12-12T10:09:40.850044Z" - } - }, - "source": [ - "flow_system.statistics.plot.heatmap('MainBoiler(Steam)')" - ], - "outputs": [ - { - "data": { - "text/plain": [ - "PlotResult(data= Size: 992B\n", - "Dimensions: (timeframe: 4, timestep: 24)\n", - "Coordinates:\n", - " * timeframe (timeframe) object 32B '2024-03-11' '2024-03-12' ... '2024-03-14'\n", - " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", - "Data variables:\n", - " value (timestep, timeframe) float64 768B 0.0 0.0 0.0 ... 0.0 0.0 nan, figure=Figure({\n", - " 'data': [{'coloraxis': 'coloraxis',\n", - " 'hovertemplate': ('timeframe: %{x}
timestep: %' ... 'flow_rate: %{z}'),\n", - " 'name': '0',\n", - " 'type': 'heatmap',\n", - " 'x': array(['2024-03-11', '2024-03-12', '2024-03-13', '2024-03-14'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", - " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", - " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", - " dtype=object),\n", - " 'yaxis': 'y',\n", - " 'z': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAPh/'),\n", - " 'dtype': 'f8',\n", - " 'shape': '24, 4'}}],\n", - " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'MainBoiler(Steam)|flow_rate'}},\n", - " 'colorscale': [[0.0, '#30123b'],\n", - " [0.07142857142857142, '#4145ab'],\n", - " [0.14285714285714285, '#4675ed'],\n", - " [0.21428571428571427, '#39a2fc'],\n", - " [0.2857142857142857, '#1bcfd4'],\n", - " [0.35714285714285715, '#24eca6'],\n", - " [0.42857142857142855, '#61fc6c'], [0.5,\n", - " '#a4fc3b'], [0.5714285714285714,\n", - " '#d1e834'], [0.6428571428571429,\n", - " '#f3c63a'], [0.7142857142857143,\n", - " '#fe9b2d'], [0.7857142857142857,\n", - " '#f36315'], [0.8571428571428571,\n", - " '#d93806'], [0.9285714285714286,\n", - " '#b11901'], [1.0, '#7a0402']]},\n", - " 'margin': {'t': 60},\n", - " 'template': '...',\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", - " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", - "}))" - ], - "text/html": [ - "
\n", - "
" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "execution_count": 7 + "outputs": [], + "execution_count": null, + "source": "flow_system.statistics.plot.heatmap('MainBoiler(Steam)')", + "id": "5a549b8b60f32745" }, { - "cell_type": "markdown", - "id": "15", "metadata": {}, + "cell_type": "markdown", "source": [ "### On/Off Status\n", "\n", "Track the boiler's operational status:" - ] + ], + "id": "66816d462d2f2654" }, { + "metadata": {}, "cell_type": "code", - "id": "16", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:41.315830Z", - "start_time": "2025-12-12T10:09:41.184920Z" - } - }, + "outputs": [], + "execution_count": null, "source": [ "# Merge solution DataArrays directly - xarray aligns coordinates automatically\n", "status_ds = xr.Dataset(\n", @@ -4466,294 +256,136 @@ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n", "fig" ], - "outputs": [ - { - "data": { - "text/html": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data", - "jetTransient": { - "display_id": null - } - } - ], - "execution_count": 8 + "id": "41801a37f07aa265" }, { - "cell_type": "markdown", - "id": "17", "metadata": {}, - "source": [ - "### Startup Count and Costs" - ] + "cell_type": "markdown", + "source": "### Startup Count and Costs", + "id": "7ca893f03606362" }, { + "metadata": {}, "cell_type": "code", - "id": "18", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:41.443170Z", - "start_time": "2025-12-12T10:09:41.430976Z" - } - }, - "source": "total_startups = int(flow_system.solution['MainBoiler|startup'].sum().item())\ntotal_costs = flow_system.solution['costs'].item()\nstartup_costs = total_startups * 50\ngas_costs = total_costs - startup_costs\n\nprint('=== Cost Breakdown ===')\nprint(f'Number of startups: {total_startups}')\nprint(f'Startup costs: {startup_costs:.0f} €')\nprint(f'Gas costs: {gas_costs:.2f} €')\nprint(f'Total costs: {total_costs:.2f} €')", - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "=== Cost Breakdown ===\n", - "Number of startups: 3\n", - "Startup costs: 150 €\n", - "Gas costs: 1291.32 €\n", - "Total costs: 1441.32 €\n" - ] - } + "outputs": [], + "execution_count": null, + "source": [ + "total_startups = int(flow_system.solution['MainBoiler|startup'].sum().item())\n", + "total_costs = flow_system.solution['costs'].item()\n", + "startup_costs = total_startups * 50\n", + "gas_costs = total_costs - startup_costs\n", + "\n", + "print('=== Cost Breakdown ===')\n", + "print(f'Number of startups: {total_startups}')\n", + "print(f'Startup costs: {startup_costs:.0f} €')\n", + "print(f'Gas costs: {gas_costs:.2f} €')\n", + "print(f'Total costs: {total_costs:.2f} €')" ], - "execution_count": 9 + "id": "a95273c9775e1fd9" }, { - "cell_type": "markdown", - "id": "19", "metadata": {}, + "cell_type": "markdown", "source": [ "### Duration Curves\n", "\n", "See how often each boiler operates at 
different load levels:" - ] + ], + "id": "e29cf8ae428387bd" }, { + "metadata": {}, "cell_type": "code", - "id": "20", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:41.543723Z", - "start_time": "2025-12-12T10:09:41.463211Z" - } - }, - "source": [ - "flow_system.statistics.plot.duration_curve('MainBoiler(Steam)')" - ], - "outputs": [ - { - "data": { - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (duration: 73)\n", - "Coordinates:\n", - " * duration (duration) int64 584B 0 1 2 3 4 5 6 ... 67 68 69 70 71 72\n", - "Data variables:\n", - " MainBoiler(Steam) (duration) float64 584B nan 435.1 429.9 ... 0.0 0.0 0.0, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=MainBoiler(Steam)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'MainBoiler(Steam)',\n", - " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'MainBoiler(Steam)',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", - " 'dtype': 'i1'},\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////3+Tw6lGkDF7QCDE7iqb3n' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", - " 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'Duration Curve'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" - ], - "text/html": [ - "
\n", - "
" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "execution_count": 10 + "outputs": [], + "execution_count": null, + "source": "flow_system.statistics.plot.duration_curve('MainBoiler(Steam)')", + "id": "14e906ea8912de10" }, { + "metadata": {}, "cell_type": "code", - "id": "21", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:41.641113Z", - "start_time": "2025-12-12T10:09:41.565242Z" - } - }, - "source": [ - "flow_system.statistics.plot.duration_curve('BackupBoiler(Steam)')" - ], - "outputs": [ - { - "data": { - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (duration: 73)\n", - "Coordinates:\n", - " * duration (duration) int64 584B 0 1 2 3 4 5 ... 67 68 69 70 71 72\n", - "Data variables:\n", - " BackupBoiler(Steam) (duration) float64 584B nan 127.8 ... -2.543e-14, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=BackupBoiler(Steam)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'BackupBoiler(Steam)',\n", - " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'BackupBoiler(Steam)',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", - " 'dtype': 'i1'},\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////3/DcRZAOvZfQHoLJc/SMF' ... 'nA0x0XvXh0E0qVJBy9UQm+McCgHL0='),\n", - " 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'Duration Curve'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" - ], - "text/html": [ - "
\n", - "
" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "execution_count": 11 + "outputs": [], + "execution_count": null, + "source": "flow_system.statistics.plot.duration_curve('BackupBoiler(Steam)')", + "id": "15d6068612a73f84" }, { - "cell_type": "markdown", - "id": "22", "metadata": {}, + "cell_type": "markdown", "source": [ "## Compare: Without Operational Constraints\n", "\n", "What if the main boiler had no startup costs or minimum uptime?" - ] + ], + "id": "8354cd68733d5086" }, { + "metadata": {}, "cell_type": "code", - "id": "23", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:42.275686Z", - "start_time": "2025-12-12T10:09:41.660745Z" - } - }, - "source": "# Build unconstrained system\nfs_unconstrained = fx.FlowSystem(timesteps)\nfs_unconstrained.add_carriers(steam_carrier) # Reuse the custom carrier\n\nfs_unconstrained.add_elements(\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Steam', carrier='steam'),\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n # Main boiler WITHOUT status parameters\n fx.linear_converters.Boiler(\n 'MainBoiler',\n thermal_efficiency=0.94,\n thermal_flow=fx.Flow('Steam', bus='Steam', size=500),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n fx.linear_converters.Boiler(\n 'BackupBoiler',\n thermal_efficiency=0.85,\n thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n fx.Sink('Factory', inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n)\n\nfs_unconstrained.optimize(fx.solvers.HighsSolver())\nunconstrained_costs = fs_unconstrained.solution['costs'].item()\n\nprint('=== Comparison ===')\nprint(f'With constraints: {total_costs:.2f} €')\nprint(f'Without constraints: {unconstrained_costs:.2f} €')\nprint(\n f'Constraint cost: {total_costs - unconstrained_costs:.2f} 
€ ({(total_costs - unconstrained_costs) / unconstrained_costs * 100:.1f}%)'\n)", "outputs": [], - "execution_count": null + "execution_count": null, + "source": [ + "# Build unconstrained system\n", + "fs_unconstrained = fx.FlowSystem(timesteps)\n", + "fs_unconstrained.add_carriers(\n", + " fx.Carrier('gas', '#3498db', 'kW'),\n", + " fx.Carrier('steam', '#87CEEB', 'kW_th', 'Process steam'),\n", + ")\n", + "\n", + "fs_unconstrained.add_elements(\n", + " fx.Bus('Gas', carrier='gas'),\n", + " fx.Bus('Steam', carrier='steam'),\n", + " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n", + " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n", + " # Main boiler WITHOUT status parameters\n", + " fx.linear_converters.Boiler(\n", + " 'MainBoiler',\n", + " thermal_efficiency=0.94,\n", + " thermal_flow=fx.Flow('Steam', bus='Steam', size=500),\n", + " fuel_flow=fx.Flow('Gas', bus='Gas'),\n", + " ),\n", + " fx.linear_converters.Boiler(\n", + " 'BackupBoiler',\n", + " thermal_efficiency=0.85,\n", + " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n", + " fuel_flow=fx.Flow('Gas', bus='Gas'),\n", + " ),\n", + " fx.Sink('Factory', inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n", + ")\n", + "\n", + "fs_unconstrained.optimize(fx.solvers.HighsSolver())\n", + "unconstrained_costs = fs_unconstrained.solution['costs'].item()\n", + "\n", + "print('=== Comparison ===')\n", + "print(f'With constraints: {total_costs:.2f} €')\n", + "print(f'Without constraints: {unconstrained_costs:.2f} €')\n", + "print(\n", + " f'Constraint cost: {total_costs - unconstrained_costs:.2f} € ({(total_costs - unconstrained_costs) / unconstrained_costs * 100:.1f}%)'\n", + ")" + ], + "id": "8769dbda34dd4ccf" }, { - "cell_type": "markdown", - "id": "24", "metadata": {}, + "cell_type": "markdown", "source": [ "### Energy Flow Sankey\n", "\n", "A Sankey diagram visualizes the total energy 
flows through the system:" - ] + ], + "id": "64ddc254af867367" }, { + "metadata": {}, "cell_type": "code", - "id": "25", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T10:09:42.358234Z", - "start_time": "2025-12-12T10:09:42.304711Z" - } - }, - "source": [ - "flow_system.statistics.plot.sankey.flows()" - ], - "outputs": [ - { - "data": { - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (link: 6)\n", - "Coordinates:\n", - " * link (link) int64 48B 0 1 2 3 4 5\n", - " source (link) \n", - "
" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "execution_count": 13 + "outputs": [], + "execution_count": null, + "source": "flow_system.statistics.plot.sankey.flows()", + "id": "f2742f4b0a7c5323" }, { - "cell_type": "markdown", - "id": "26", "metadata": {}, + "cell_type": "markdown", "source": [ "## Key Concepts\n", "\n", @@ -4800,21 +432,12 @@ "### Next Steps\n", "\n", "- **[05-multi-carrier-system](05-multi-carrier-system.ipynb)**: Model CHP with electricity and heat\n", - "- **[06-piecewise-efficiency](06-piecewise-efficiency.ipynb)**: Variable efficiency at different loads" - ] + "- **[06a-time-varying-parameters](06a-time-varying-parameters.ipynb)**: Variable efficiency based on external conditions" + ], + "id": "2f9951587227304f" } ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "name": "python", - "version": "3.11" - } - }, + "metadata": {}, "nbformat": 4, "nbformat_minor": 5 } diff --git a/docs/notebooks/05-multi-carrier-system.ipynb b/docs/notebooks/05-multi-carrier-system.ipynb index de2ce1b5f..76de7e69a 100644 --- a/docs/notebooks/05-multi-carrier-system.ipynb +++ b/docs/notebooks/05-multi-carrier-system.ipynb @@ -169,89 +169,7 @@ "id": "9", "metadata": {}, "outputs": [], - "source": [ - "flow_system = fx.FlowSystem(timesteps)\n", - "\n", - "flow_system.add_elements(\n", - " # === Buses with carriers for visual distinction ===\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Bus('Heat', carrier='heat'),\n", - " fx.Bus('Gas', carrier='gas'),\n", - " # === Effects ===\n", - " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n", - " fx.Effect('CO2', 'kg', 'CO2 Emissions'), # Track emissions too\n", - " # === Gas Supply ===\n", - " fx.Source(\n", - " 'GasGrid',\n", - " outputs=[\n", - " fx.Flow(\n", - " 'Gas',\n", - " bus='Gas',\n", - " size=1000,\n", - " 
effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2}, # Gas: 0.2 kg CO2/kWh\n", - " )\n", - " ],\n", - " ),\n", - " # === Electricity Grid (buy) ===\n", - " fx.Source(\n", - " 'GridBuy',\n", - " outputs=[\n", - " fx.Flow(\n", - " 'Electricity',\n", - " bus='Electricity',\n", - " size=500,\n", - " effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}, # Grid: 0.4 kg CO2/kWh\n", - " )\n", - " ],\n", - " ),\n", - " # === Electricity Grid (sell) - negative cost = revenue ===\n", - " fx.Sink(\n", - " 'GridSell',\n", - " inputs=[\n", - " fx.Flow(\n", - " 'Electricity',\n", - " bus='Electricity',\n", - " size=200,\n", - " effects_per_flow_hour={'costs': -elec_sell_price}, # Negative = income\n", - " )\n", - " ],\n", - " ),\n", - " # === CHP Unit (Combined Heat and Power) ===\n", - " fx.linear_converters.CHP(\n", - " 'CHP',\n", - " electrical_efficiency=0.40, # 40% to electricity\n", - " thermal_efficiency=0.50, # 50% to heat (total: 90%)\n", - " status_parameters=fx.StatusParameters(\n", - " effects_per_startup={'costs': 30},\n", - " min_uptime=3,\n", - " ),\n", - " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n", - " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n", - " fuel_flow=fx.Flow(\n", - " 'Q_fuel',\n", - " bus='Gas',\n", - " size=500,\n", - " relative_minimum=0.4, # Min 40% load\n", - " ),\n", - " ),\n", - " # === Gas Boiler (heat only) ===\n", - " fx.linear_converters.Boiler(\n", - " 'Boiler',\n", - " thermal_efficiency=0.92,\n", - " thermal_flow=fx.Flow('Q_th', bus='Heat', size=400),\n", - " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n", - " ),\n", - " # === Hospital Loads ===\n", - " fx.Sink(\n", - " 'HospitalElec',\n", - " inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n", - " ),\n", - " fx.Sink(\n", - " 'HospitalHeat',\n", - " inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n", - " ),\n", - ")" - ] + "source": "flow_system = 
fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # === Buses with carriers for visual distinction ===\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus('Heat', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n # === Effects ===\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n fx.Effect('CO2', 'kg', 'CO2 Emissions'), # Track emissions too\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[\n fx.Flow(\n 'Gas',\n bus='Gas',\n size=1000,\n effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2}, # Gas: 0.2 kg CO2/kWh\n )\n ],\n ),\n # === Electricity Grid (buy) ===\n fx.Source(\n 'GridBuy',\n outputs=[\n fx.Flow(\n 'Electricity',\n bus='Electricity',\n size=500,\n effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}, # Grid: 0.4 kg CO2/kWh\n )\n ],\n ),\n # === Electricity Grid (sell) - negative cost = revenue ===\n fx.Sink(\n 'GridSell',\n inputs=[\n fx.Flow(\n 'Electricity',\n bus='Electricity',\n size=200,\n effects_per_flow_hour={'costs': -elec_sell_price}, # Negative = income\n )\n ],\n ),\n # === CHP Unit (Combined Heat and Power) ===\n fx.linear_converters.CHP(\n 'CHP',\n electrical_efficiency=0.40, # 40% to electricity\n thermal_efficiency=0.50, # 50% to heat (total: 90%)\n status_parameters=fx.StatusParameters(\n effects_per_startup={'costs': 30},\n min_uptime=3,\n ),\n electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n fuel_flow=fx.Flow(\n 'Q_fuel',\n bus='Gas',\n size=500,\n relative_minimum=0.4, # Min 40% load\n ),\n ),\n # === Gas Boiler (heat only) ===\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=400),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n # === Hospital Loads ===\n fx.Sink(\n 'HospitalElec',\n 
inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n ),\n fx.Sink(\n 'HospitalHeat',\n inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n ),\n)" }, { "cell_type": "markdown", @@ -407,52 +325,7 @@ "id": "23", "metadata": {}, "outputs": [], - "source": [ - "# Build system without CHP\n", - "fs_no_chp = fx.FlowSystem(timesteps)\n", - "\n", - "fs_no_chp.add_elements(\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Bus('Heat', carrier='heat'),\n", - " fx.Bus('Gas', carrier='gas'),\n", - " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n", - " fx.Effect('CO2', 'kg', 'CO2 Emissions'),\n", - " fx.Source(\n", - " 'GasGrid',\n", - " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n", - " ),\n", - " fx.Source(\n", - " 'GridBuy',\n", - " outputs=[\n", - " fx.Flow(\n", - " 'Electricity', bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}\n", - " )\n", - " ],\n", - " ),\n", - " # Only boiler for heat\n", - " fx.linear_converters.Boiler(\n", - " 'Boiler',\n", - " thermal_efficiency=0.92,\n", - " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n", - " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n", - " ),\n", - " fx.Sink(\n", - " 'HospitalElec', inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]\n", - " ),\n", - " fx.Sink('HospitalHeat', inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n", - ")\n", - "\n", - "fs_no_chp.optimize(fx.solvers.HighsSolver())\n", - "\n", - "no_chp_costs = fs_no_chp.solution['costs'].item()\n", - "no_chp_co2 = fs_no_chp.solution['CO2'].item()\n", - "\n", - "print('=== CHP Benefit Analysis ===')\n", - "print(f'Without CHP: {no_chp_costs:.2f} € / {no_chp_co2:.0f} kg CO2')\n", - "print(f'With CHP: {total_costs:.2f} € / {total_co2:.0f} kg CO2')\n", - "print(f'Cost 
savings: {no_chp_costs - total_costs:.2f} € ({(no_chp_costs - total_costs) / no_chp_costs * 100:.1f}%)')\n", - "print(f'CO2 reduction: {no_chp_co2 - total_co2:.0f} kg ({(no_chp_co2 - total_co2) / no_chp_co2 * 100:.1f}%)')" - ] + "source": "# Build system without CHP\nfs_no_chp = fx.FlowSystem(timesteps)\nfs_no_chp.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nfs_no_chp.add_elements(\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus('Heat', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n fx.Effect('CO2', 'kg', 'CO2 Emissions'),\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n ),\n fx.Source(\n 'GridBuy',\n outputs=[\n fx.Flow(\n 'Electricity', bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}\n )\n ],\n ),\n # Only boiler for heat\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n fx.Sink(\n 'HospitalElec', inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]\n ),\n fx.Sink('HospitalHeat', inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n)\n\nfs_no_chp.optimize(fx.solvers.HighsSolver())\n\nno_chp_costs = fs_no_chp.solution['costs'].item()\nno_chp_co2 = fs_no_chp.solution['CO2'].item()\n\nprint('=== CHP Benefit Analysis ===')\nprint(f'Without CHP: {no_chp_costs:.2f} € / {no_chp_co2:.0f} kg CO2')\nprint(f'With CHP: {total_costs:.2f} € / {total_co2:.0f} kg CO2')\nprint(f'Cost savings: {no_chp_costs - total_costs:.2f} € ({(no_chp_costs - total_costs) / no_chp_costs * 100:.1f}%)')\nprint(f'CO2 reduction: {no_chp_co2 - total_co2:.0f} kg ({(no_chp_co2 - total_co2) / 
no_chp_co2 * 100:.1f}%)')" }, { "cell_type": "markdown", @@ -478,57 +351,7 @@ "cell_type": "markdown", "id": "26", "metadata": {}, - "source": [ - "## Key Concepts\n", - "\n", - "### Multi-Carrier Systems\n", - "\n", - "- Multiple buses for different energy carriers (electricity, heat, gas)\n", - "- Components can connect to multiple buses (CHP produces both electricity and heat)\n", - "- Carriers enable automatic coloring in visualizations\n", - "\n", - "### CHP Modeling\n", - "\n", - "```python\n", - "fx.linear_converters.CHP(\n", - " 'CHP',\n", - " electrical_efficiency=0.40, # Fuel → Electricity\n", - " thermal_efficiency=0.50, # Fuel → Heat\n", - " # Total efficiency = 0.40 + 0.50 = 0.90 (90%)\n", - " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n", - " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n", - " fuel_flow=fx.Flow('Q_fuel', bus='Gas', size=500),\n", - ")\n", - "```\n", - "\n", - "### Electricity Markets\n", - "\n", - "- **Buy**: Source with positive cost\n", - "- **Sell**: Sink with negative cost (= revenue)\n", - "- Different prices for buy vs. 
sell (spread)\n", - "\n", - "### Tracking Multiple Effects\n", - "\n", - "```python\n", - "fx.Effect('costs', '€', 'Total Costs', is_objective=True) # Minimize this\n", - "fx.Effect('CO2', 'kg', 'CO2 Emissions') # Just track, don't optimize\n", - "```\n", - "\n", - "## Summary\n", - "\n", - "You learned how to:\n", - "\n", - "- Model **multiple energy carriers** (electricity, heat, gas)\n", - "- Use **CHP** for combined heat and power production\n", - "- Model **electricity markets** with buy/sell prices\n", - "- Track **multiple effects** (costs and emissions)\n", - "- Analyze **multi-carrier balances**\n", - "\n", - "### Next Steps\n", - "\n", - "- **[06-piecewise-efficiency](06-piecewise-efficiency.ipynb)**: Model variable efficiency curves\n", - "- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Plan under uncertainty" - ] + "source": "## Key Concepts\n\n### Multi-Carrier Systems\n\n- Multiple buses for different energy carriers (electricity, heat, gas)\n- Components can connect to multiple buses (CHP produces both electricity and heat)\n- Carriers enable automatic coloring in visualizations\n\n### CHP Modeling\n\n```python\nfx.linear_converters.CHP(\n 'CHP',\n electrical_efficiency=0.40, # Fuel → Electricity\n thermal_efficiency=0.50, # Fuel → Heat\n # Total efficiency = 0.40 + 0.50 = 0.90 (90%)\n electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas', size=500),\n)\n```\n\n### Electricity Markets\n\n- **Buy**: Source with positive cost\n- **Sell**: Sink with negative cost (= revenue)\n- Different prices for buy vs. 
sell (spread)\n\n### Tracking Multiple Effects\n\n```python\nfx.Effect('costs', '€', 'Total Costs', is_objective=True) # Minimize this\nfx.Effect('CO2', 'kg', 'CO2 Emissions') # Just track, don't optimize\n```\n\n## Summary\n\nYou learned how to:\n\n- Model **multiple energy carriers** (electricity, heat, gas)\n- Use **CHP** for combined heat and power production\n- Model **electricity markets** with buy/sell prices\n- Track **multiple effects** (costs and emissions)\n- Analyze **multi-carrier balances**\n\n### Next Steps\n\n- **[06a-time-varying-parameters](06a-time-varying-parameters.ipynb)**: Variable efficiency based on conditions\n- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Plan under uncertainty" } ], "metadata": { diff --git a/docs/notebooks/06-piecewise-efficiency.ipynb b/docs/notebooks/06-piecewise-efficiency.ipynb deleted file mode 100644 index bb50793ff..000000000 --- a/docs/notebooks/06-piecewise-efficiency.ipynb +++ /dev/null @@ -1,633 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": "# Piecewise\n\nHeat pump with temperature-dependent COP and part-load curves.\n\nThis notebook introduces:\n\n- **Piecewise linear functions**: Approximate non-linear behavior\n- **Variable efficiency**: COP changes with operating conditions\n- **LinearConverter with segments**: Multiple operating points\n- **Piecewise effects**: Non-linear cost curves" - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "## Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import plotly.express as px\n", - "import xarray as xr\n", - "\n", - "import flixopt as fx\n", - "\n", - "fx.CONFIG.notebook()" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## The Problem: Variable Heat Pump Efficiency\n", - "\n", - "A heat 
pump's COP (Coefficient of Performance) depends on the temperature difference between source and sink:\n", - "\n", - "- **Mild weather** (10°C outside): COP ≈ 4.5 (1 kWh electricity → 4.5 kWh heat)\n", - "- **Cold weather** (-5°C outside): COP ≈ 2.5 (1 kWh electricity → 2.5 kWh heat)\n", - "\n", - "This non-linear relationship can be approximated using piecewise linear segments." - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "## Define Time Series Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# One winter week\n", - "timesteps = pd.date_range('2024-01-22', periods=168, freq='h')\n", - "hours = np.arange(168)\n", - "hour_of_day = hours % 24\n", - "\n", - "# Outdoor temperature: daily cycle with cold nights\n", - "temp_base = 2 # Average temp\n", - "temp_amplitude = 5 # Daily variation\n", - "outdoor_temp = temp_base + temp_amplitude * np.sin((hour_of_day - 6) * np.pi / 12)\n", - "\n", - "# Add day-to-day variation\n", - "np.random.seed(789)\n", - "daily_offset = np.repeat(np.random.uniform(-3, 3, 7), 24)\n", - "outdoor_temp = outdoor_temp + daily_offset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Heat demand: inversely related to outdoor temp (higher demand when colder)\n", - "heat_demand = 200 - 8 * outdoor_temp\n", - "heat_demand = np.clip(heat_demand, 100, 300)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "# Visualize with plotly - using xarray and faceting\n", - "profiles = xr.Dataset(\n", - " {\n", - " 'Outdoor Temp [°C]': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n", - " 'Heat Demand [kW]': xr.DataArray(heat_demand, dims=['time'], coords={'time': timesteps}),\n", - " }\n", - ")\n", - "\n", - "df = 
profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n", - "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n", - "fig.update_yaxes(matches=None, showticklabels=True)\n", - "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n", - "fig" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Calculate Time-Varying COP\n", - "\n", - "The COP depends on outdoor temperature. We use a simplified Carnot-based formula:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# COP calculation (simplified): Real COP ≈ 0.45 * Carnot COP\n", - "T_supply = 45 + 273.15 # Supply temperature 45°C in Kelvin\n", - "T_source = outdoor_temp + 273.15 # Outdoor temp in Kelvin\n", - "\n", - "carnot_cop = T_supply / (T_supply - T_source)\n", - "real_cop = 0.45 * carnot_cop\n", - "real_cop = np.clip(real_cop, 2.0, 5.0) # Physical limits" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Plot COP vs temperature using plotly\n", - "px.scatter(\n", - " x=outdoor_temp,\n", - " y=real_cop,\n", - " title='Heat Pump COP vs Outdoor Temperature',\n", - " labels={'x': 'Outdoor Temperature [°C]', 'y': 'COP'},\n", - " opacity=0.5,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Approach 1: Simple Model with Time-Varying Efficiency\n", - "\n", - "The simplest approach: use time-varying conversion factors directly." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "flow_system = fx.FlowSystem(timesteps)\n", - "\n", - "flow_system.add_elements(\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Bus('Heat', carrier='heat'),\n", - " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n", - " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n", - " fx.LinearConverter(\n", - " 'HeatPump',\n", - " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n", - " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n", - " conversion_factors=[{'Elec': real_cop, 'Heat': 1}], # Time-varying COP\n", - " ),\n", - " fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n", - ")\n", - "\n", - "flow_system.optimize(fx.solvers.HighsSolver())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "flow_system.statistics.plot.balance('Heat')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "flow_system.statistics.plot.balance('Electricity')" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "## Approach 2: Simple vs Piecewise Efficiency - Model Refinement\n", - "\n", - "Real equipment often has non-linear efficiency curves. Let's compare:\n", - "1. **Simple model**: Constant average efficiency\n", - "2. **Refined model**: Piecewise linear efficiency that varies with load\n", - "\n", - "This demonstrates how to progressively refine a model for more accurate results." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# Gas engine efficiency varies with load:\n", - "# - Part load: lower efficiency\n", - "# - Mid load: optimal efficiency (~42%)\n", - "# - Full load: slightly lower efficiency\n", - "\n", - "SIMPLE_EFFICIENCY = 0.38 # Average efficiency for simple model\n", - "\n", - "# Piecewise model: efficiency varies by operating segment\n", - "piecewise_efficiency = fx.PiecewiseConversion(\n", - " {\n", - " 'Fuel': fx.Piecewise(\n", - " [\n", - " fx.Piece(start=78, end=132), # Part load\n", - " fx.Piece(start=132, end=179), # Mid load (optimal)\n", - " fx.Piece(start=179, end=250), # High load\n", - " ]\n", - " ),\n", - " 'Elec': fx.Piecewise(\n", - " [\n", - " fx.Piece(start=25, end=50),\n", - " fx.Piece(start=50, end=75),\n", - " fx.Piece(start=75, end=100),\n", - " ]\n", - " ),\n", - " }\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# 2-day demand profile\n", - "timesteps_simple = pd.date_range('2024-01-22', periods=48, freq='h')\n", - "elec_demand_simple = np.concatenate([np.linspace(30, 90, 24), np.linspace(90, 30, 24)])\n", - "\n", - "# MODEL 1: Simple constant efficiency\n", - "fs_simple = fx.FlowSystem(timesteps_simple)\n", - "fs_simple.add_elements(\n", - " fx.Bus('Gas', carrier='gas'),\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True),\n", - " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),\n", - " fx.LinearConverter(\n", - " 'GasEngine',\n", - " inputs=[fx.Flow('Fuel', bus='Gas', size=300)],\n", - " outputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n", - " conversion_factors=[{'Fuel': 1, 'Elec': SIMPLE_EFFICIENCY}],\n", - " ),\n", - " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, 
fixed_relative_profile=elec_demand_simple)]),\n", - ")\n", - "fs_simple.optimize(fx.solvers.HighsSolver())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# MODEL 2: Piecewise efficiency (load-dependent)\n", - "fs_piecewise = fx.FlowSystem(timesteps_simple)\n", - "fs_piecewise.add_elements(\n", - " fx.Bus('Gas', carrier='gas'),\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True),\n", - " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),\n", - " fx.LinearConverter(\n", - " 'GasEngine',\n", - " inputs=[fx.Flow('Fuel', bus='Gas')],\n", - " outputs=[fx.Flow('Elec', bus='Electricity')],\n", - " piecewise_conversion=piecewise_efficiency,\n", - " ),\n", - " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, fixed_relative_profile=elec_demand_simple)]),\n", - ")\n", - "fs_piecewise.optimize(fx.solvers.HighsSolver())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# Visualize the piecewise conversion curve\n", - "fs_piecewise.components['GasEngine'].piecewise_conversion.plot(\n", - " x_flow='Fuel', title='Gas Engine: Fuel Input vs Electricity Output'\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# Compare: Simple vs Piecewise efficiency\n", - "print(f'Simple model: {fs_simple.solution[\"costs\"].item():.2f} €')\n", - "print(f'Piecewise model: {fs_piecewise.solution[\"costs\"].item():.2f} €')" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "## Approach 3: Simple vs Piecewise Investment Costs\n", - "\n", - "Investment costs often have economies of scale - larger systems cost less per unit." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "fs_piecewise.statistics.plot.balance('Gas')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# Investment costs (daily amortized)\n", - "SIMPLE_INVEST_COST = 0.20 # €/kWh constant\n", - "\n", - "# Piecewise: economies of scale (0.20 → 0.15 → 0.10 €/kWh)\n", - "piecewise_invest_costs = fx.PiecewiseEffects(\n", - " piecewise_origin=fx.Piecewise(\n", - " [\n", - " fx.Piece(start=0, end=100),\n", - " fx.Piece(start=100, end=300),\n", - " fx.Piece(start=300, end=600),\n", - " ]\n", - " ),\n", - " piecewise_shares={\n", - " 'costs': fx.Piecewise(\n", - " [\n", - " fx.Piece(start=0, end=20), # 0.20 €/kWh\n", - " fx.Piece(start=20, end=50), # 0.15 €/kWh\n", - " fx.Piece(start=50, end=80), # 0.10 €/kWh\n", - " ]\n", - " )\n", - " },\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "# Base system: Price arbitrage scenario (3 days)\n", - "timesteps_invest = pd.date_range('2024-01-22', periods=72, freq='h')\n", - "\n", - "heat_demand_invest = np.tile(\n", - " np.concatenate(\n", - " [\n", - " np.full(8, 100),\n", - " np.full(4, 120),\n", - " np.full(4, 110),\n", - " np.full(4, 130),\n", - " np.full(4, 105),\n", - " ]\n", - " ),\n", - " 3,\n", - ")\n", - "\n", - "energy_price_invest = np.tile(\n", - " np.concatenate(\n", - " [\n", - " np.full(8, 0.08),\n", - " np.full(4, 0.20),\n", - " np.full(4, 0.12),\n", - " np.full(4, 0.25),\n", - " np.full(4, 0.10),\n", - " ]\n", - " ),\n", - " 3,\n", - ")\n", - "\n", - "fs_base = fx.FlowSystem(timesteps_invest)\n", - "fs_base.add_elements(\n", - " fx.Bus('Heat', carrier='heat'),\n", - " fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True),\n", - " fx.Source('HeatSource', outputs=[fx.Flow('Heat', bus='Heat', size=300, 
effects_per_flow_hour=energy_price_invest)]),\n", - " fx.Sink('HeatSink', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand_invest)]),\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "# MODEL 1: Simple linear investment costs\n", - "fs_simple_invest = fs_base.copy()\n", - "fs_simple_invest.add_elements(\n", - " fx.Storage(\n", - " 'ThermalStorage',\n", - " charging=fx.Flow('charge', bus='Heat', size=200),\n", - " discharging=fx.Flow('discharge', bus='Heat', size=200),\n", - " capacity_in_flow_hours=fx.InvestParameters(\n", - " effects_of_investment_per_size=SIMPLE_INVEST_COST,\n", - " minimum_size=0,\n", - " maximum_size=600,\n", - " ),\n", - " initial_charge_state=0,\n", - " eta_charge=0.95,\n", - " eta_discharge=0.95,\n", - " relative_loss_per_hour=0.005,\n", - " ),\n", - ")\n", - "fs_simple_invest.optimize(fx.solvers.HighsSolver())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "# MODEL 2: Piecewise investment costs (economies of scale)\n", - "fs_piecewise_invest = fs_base.copy()\n", - "fs_piecewise_invest.add_elements(\n", - " fx.Storage(\n", - " 'ThermalStorage',\n", - " charging=fx.Flow('charge', bus='Heat', size=200),\n", - " discharging=fx.Flow('discharge', bus='Heat', size=200),\n", - " capacity_in_flow_hours=fx.InvestParameters(\n", - " piecewise_effects_of_investment=piecewise_invest_costs,\n", - " minimum_size=0,\n", - " maximum_size=600,\n", - " ),\n", - " initial_charge_state=0,\n", - " eta_charge=0.95,\n", - " eta_discharge=0.95,\n", - " relative_loss_per_hour=0.005,\n", - " ),\n", - ")\n", - "fs_piecewise_invest.optimize(fx.solvers.HighsSolver())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "# Visualize the piecewise investment cost curve\n", - "# Access through the 
storage component after it's part of the FlowSystem\n", - "fs_piecewise_invest.components['ThermalStorage'].capacity_in_flow_hours.piecewise_effects_of_investment.plot(\n", - " title='Storage Investment Costs (Economies of Scale)'\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "# Compare: Simple vs Piecewise investment\n", - "print(\n", - " f'Simple model: {fs_simple_invest.solution[\"ThermalStorage|size\"].item():.0f} kWh, {fs_simple_invest.solution[\"costs\"].item():.2f} €'\n", - ")\n", - "print(\n", - " f'Piecewise model: {fs_piecewise_invest.solution[\"ThermalStorage|size\"].item():.0f} kWh, {fs_piecewise_invest.solution[\"costs\"].item():.2f} €'\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "29", - "metadata": {}, - "source": [ - "### Visualize Storage Operation and Energy Flows" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "# Storage operation visualization\n", - "fs_piecewise_invest.statistics.plot.heatmap('ThermalStorage')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "fs_piecewise_invest.statistics.plot.sankey.flows()" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "## Key Concepts\n", - "\n", - "### Time-Varying Conversion Factors\n", - "\n", - "For temperature-dependent efficiency, use arrays with flow labels as keys:\n", - "```python\n", - "fx.LinearConverter(\n", - " 'HeatPump',\n", - " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n", - " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n", - " # Equation: Elec * COP = Heat * 1\n", - " conversion_factors=[{'Elec': cop_array, 'Heat': 1}],\n", - ")\n", - "```\n", - "\n", - "### Piecewise Linear Functions\n", - "\n", - "For non-linear relationships (part-load efficiency), use 
`PiecewiseConversion`:\n", - "```python\n", - "fx.PiecewiseConversion({\n", - " 'Fuel': fx.Piecewise([\n", - " fx.Piece(start=78, end=132), # Segment 1\n", - " fx.Piece(start=132, end=179), # Segment 2\n", - " ]),\n", - " 'Elec': fx.Piecewise([\n", - " fx.Piece(start=25, end=50), # Segment 1\n", - " fx.Piece(start=50, end=75), # Segment 2\n", - " ]),\n", - "})\n", - "```\n", - "\n", - "### When to Use Each Approach\n", - "\n", - "| Approach | Use Case | Complexity |\n", - "|----------|----------|------------|\n", - "| Time-varying factors | Efficiency depends on external conditions (temp, price) | Low |\n", - "| PiecewiseConversion | Efficiency depends on load level | Medium |\n", - "| PiecewiseEffects | Non-linear costs (economies of scale) | Medium |\n", - "\n", - "## Summary\n", - "\n", - "You learned how to:\n", - "\n", - "- Model **time-varying efficiency** using conversion factor arrays\n", - "- Use **PiecewiseConversion** for load-dependent efficiency curves\n", - "- Apply **PiecewiseEffects** for non-linear cost functions\n", - "- Choose the right approach for your modeling needs\n", - "\n", - "### Next Steps\n", - "\n", - "- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Multi-scenario planning\n", - "- **[08-large-scale-optimization](08-large-scale-optimization.ipynb)**: Computational efficiency techniques" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.11" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/notebooks/06a-time-varying-parameters.ipynb b/docs/notebooks/06a-time-varying-parameters.ipynb new file mode 100644 index 000000000..9856aa095 --- /dev/null +++ 
b/docs/notebooks/06a-time-varying-parameters.ipynb @@ -0,0 +1,339 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Time-Varying Parameters\n", + "\n", + "Model equipment with efficiency that changes based on external conditions.\n", + "\n", + "This notebook covers:\n", + "\n", + "- **Time-varying conversion factors**: Efficiency depends on external conditions\n", + "- **Temperature-dependent COP**: Heat pump performance varies with weather\n", + "- **Practical application**: Using arrays in conversion factor definitions" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "id": "2", + "metadata": {}, + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import plotly.express as px\n", + "import xarray as xr\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## The Problem: Variable Heat Pump Efficiency\n", + "\n", + "A heat pump's COP (Coefficient of Performance) depends on the temperature difference between source and sink:\n", + "\n", + "- **Mild weather** (10°C outside): COP ≈ 4.5 (1 kWh electricity → 4.5 kWh heat)\n", + "- **Cold weather** (-5°C outside): COP ≈ 2.5 (1 kWh electricity → 2.5 kWh heat)\n", + "\n", + "This time-varying relationship can be modeled directly using arrays in the conversion factors.\n", + "\n", + "### When to Use This Approach\n", + "\n", + "Use time-varying conversion factors when:\n", + "- Efficiency depends on **external conditions** (temperature, solar irradiance, humidity)\n", + "- The relationship is **independent of the load level**\n", + "- You have **measured or forecast data** for the efficiency profile" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "## Define Time Series Data" + ] + }, 
+ { + "cell_type": "code", + "id": "5", + "metadata": {}, + "source": [ + "# One winter week\n", + "timesteps = pd.date_range('2024-01-22', periods=168, freq='h')\n", + "hours = np.arange(168)\n", + "hour_of_day = hours % 24\n", + "\n", + "# Outdoor temperature: daily cycle with cold nights\n", + "temp_base = 2 # Average temp in °C\n", + "temp_amplitude = 5 # Daily variation\n", + "outdoor_temp = temp_base + temp_amplitude * np.sin((hour_of_day - 6) * np.pi / 12)\n", + "\n", + "# Add day-to-day variation for realism\n", + "np.random.seed(789)\n", + "daily_offset = np.repeat(np.random.uniform(-3, 3, 7), 24)\n", + "outdoor_temp = outdoor_temp + daily_offset" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "id": "6", + "metadata": {}, + "source": [ + "# Heat demand: inversely related to outdoor temp (higher demand when colder)\n", + "heat_demand = 200 - 8 * outdoor_temp\n", + "heat_demand = np.clip(heat_demand, 100, 300)" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "id": "7", + "metadata": {}, + "source": [ + "# Visualize input profiles\n", + "profiles = xr.Dataset(\n", + " {\n", + " 'Outdoor Temp [°C]': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n", + " 'Heat Demand [kW]': xr.DataArray(heat_demand, dims=['time'], coords={'time': timesteps}),\n", + " }\n", + ")\n", + "\n", + "df = profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n", + "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n", + "fig.update_yaxes(matches=None, showticklabels=True)\n", + "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n", + "fig" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "8", + "metadata": {}, + "source": [ + "## Calculate Time-Varying COP\n", + "\n", + "The COP depends on outdoor temperature. 
We use a simplified Carnot-based formula:\n", + "\n", + "$$\\text{COP}_{\\text{real}} \\approx 0.45 \\times \\text{COP}_{\\text{Carnot}} = 0.45 \\times \\frac{T_{\\text{supply}}}{T_{\\text{supply}} - T_{\\text{source}}}$$\n", + "\n", + "where temperatures are in Kelvin." + ] + }, + { + "cell_type": "code", + "id": "9", + "metadata": {}, + "source": [ + "# COP calculation\n", + "T_supply = 45 + 273.15 # Supply temperature 45°C in Kelvin\n", + "T_source = outdoor_temp + 273.15 # Outdoor temp in Kelvin\n", + "\n", + "carnot_cop = T_supply / (T_supply - T_source)\n", + "real_cop = 0.45 * carnot_cop\n", + "real_cop = np.clip(real_cop, 2.0, 5.0) # Physical limits" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "id": "10", + "metadata": {}, + "source": [ + "# Visualize COP vs temperature relationship\n", + "px.scatter(\n", + " x=outdoor_temp,\n", + " y=real_cop,\n", + " title='Heat Pump COP vs Outdoor Temperature',\n", + " labels={'x': 'Outdoor Temperature [°C]', 'y': 'COP'},\n", + " opacity=0.5,\n", + ")" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": {}, + "source": [ + "## Build the Model\n", + "\n", + "The key is passing the COP array directly to `conversion_factors`. The equation becomes:\n", + "\n", + "$$\\text{Elec} \\times \\text{COP}(t) = \\text{Heat} \\times 1$$\n", + "\n", + "where `COP(t)` varies at each timestep." 
+ ] + }, + { + "cell_type": "code", + "id": "12", + "metadata": {}, + "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # Buses\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus('Heat', carrier='heat'),\n # Effect for cost tracking\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # Grid electricity source\n fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n # Heat pump with TIME-VARYING COP\n fx.LinearConverter(\n 'HeatPump',\n inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n conversion_factors=[{'Elec': real_cop, 'Heat': 1}], # <-- Array for time-varying COP\n ),\n # Heat demand\n fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n)\n\nflow_system.optimize(fx.solvers.HighsSolver())", + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "13", + "metadata": {}, + "source": [ + "## Analyze Results" + ] + }, + { + "cell_type": "code", + "id": "14", + "metadata": {}, + "source": [ + "flow_system.statistics.plot.balance('Heat')" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "id": "15", + "metadata": {}, + "source": [ + "flow_system.statistics.plot.balance('Electricity')" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "id": "16", + "metadata": {}, + "source": [ + "# Compare electricity consumption vs heat output using xarray for alignment\n", + "# Create dataset with solution and input data - xarray auto-aligns by time coordinate\n", + "comparison = xr.Dataset(\n", + " {\n", + " 'elec_consumption': flow_system.solution['HeatPump(Elec)|flow_rate'],\n", + " 'heat_output': 
flow_system.solution['HeatPump(Heat)|flow_rate'],\n", + " 'outdoor_temp': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n", + " }\n", + ")\n", + "\n", + "# Calculate effective COP at each timestep\n", + "comparison['effective_cop'] = xr.where(\n", + " comparison['elec_consumption'] > 0.1, comparison['heat_output'] / comparison['elec_consumption'], np.nan\n", + ")\n", + "\n", + "px.scatter(\n", + " x=comparison['outdoor_temp'].values,\n", + " y=comparison['effective_cop'].values,\n", + " title='Actual Operating COP vs Outdoor Temperature',\n", + " labels={'x': 'Outdoor Temperature [°C]', 'y': 'Operating COP'},\n", + ")" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "17", + "metadata": {}, + "source": [ + "## Key Concepts\n", + "\n", + "### Conversion Factor Syntax\n", + "\n", + "The `conversion_factors` parameter accepts a list of dictionaries where values can be:\n", + "- **Scalars**: Constant efficiency (e.g., `{'Fuel': 1, 'Heat': 0.9}`)\n", + "- **Arrays**: Time-varying efficiency (e.g., `{'Elec': cop_array, 'Heat': 1}`)\n", + "- **TimeSeriesData**: For more complex data with metadata\n", + "\n", + "```python\n", + "fx.LinearConverter(\n", + " 'HeatPump',\n", + " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n", + " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n", + " conversion_factors=[{'Elec': cop_array, 'Heat': 1}], # Time-varying\n", + ")\n", + "```\n", + "\n", + "### Physical Interpretation\n", + "\n", + "The conversion equation at each timestep:\n", + "$$\\text{Input}_1 \\times \\text{factor}_1(t) + \\text{Input}_2 \\times \\text{factor}_2(t) + ... 
= 0$$\n", + "\n", + "For a heat pump: `Elec * COP(t) - Heat * 1 = 0` → `Heat = Elec * COP(t)`\n", + "\n", + "### Common Use Cases\n", + "\n", + "| Equipment | Varying Parameter | External Driver |\n", + "|-----------|-------------------|------------------|\n", + "| Heat pump | COP | Outdoor temperature |\n", + "| Solar PV | Capacity factor | Solar irradiance |\n", + "| Cooling tower | Efficiency | Wet bulb temperature |\n", + "| Gas turbine | Heat rate | Ambient temperature |" + ] + }, + { + "cell_type": "markdown", + "id": "18", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "You learned how to:\n", + "\n", + "- Model **time-varying efficiency** using arrays in conversion factors\n", + "- Calculate **temperature-dependent COP** for heat pumps\n", + "- Analyze the **resulting operation** with varying efficiency\n", + "\n", + "### When to Use This vs Other Approaches\n", + "\n", + "| Approach | Use When | Example |\n", + "|----------|----------|--------|\n", + "| **Time-varying factors** (this notebook) | Efficiency varies with external conditions | Heat pump COP vs temperature |\n", + "| **PiecewiseConversion** | Efficiency varies with load level | Gas engine efficiency curve |\n", + "| **PiecewiseEffects** | Costs vary non-linearly with size | Economies of scale |\n", + "\n", + "### Next Steps\n", + "\n", + "- **[06b-piecewise-conversion](06b-piecewise-conversion.ipynb)**: Load-dependent efficiency curves\n", + "- **[06c-piecewise-effects](06c-piecewise-effects.ipynb)**: Non-linear cost functions" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.10.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/06b-piecewise-conversion.ipynb b/docs/notebooks/06b-piecewise-conversion.ipynb new file mode 100644 index 000000000..6493a843c --- /dev/null +++ b/docs/notebooks/06b-piecewise-conversion.ipynb 
@@ -0,0 +1,4371 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Piecewise Conversion\n", + "\n", + "Model equipment with **load-dependent efficiency** using piecewise linear approximation.\n", + "\n", + "**User Story:** A gas engine's efficiency varies with load - lower at part-load, optimal at mid-load. We want to capture this non-linear behavior in our optimization." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "1", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:20.634505Z", + "start_time": "2025-12-13T13:46:16.763911Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "flixopt.config.CONFIG" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## The Problem\n", + "\n", + "Real equipment efficiency varies with operating point:\n", + "\n", + "| Load Level | Electrical Efficiency | Reason |\n", + "|------------|----------------------|--------|\n", + "| 25-50% (part load) | 32-38% | Throttling losses |\n", + "| 50-75% (mid load) | 38-42% | Near design point |\n", + "| 75-100% (full load) | 42-40% | Thermal limits |\n", + "\n", + "A constant efficiency assumption misses this behavior." 
+ ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Define the Efficiency Curve\n", + "\n", + "Each `Piece` defines corresponding fuel input and electricity output ranges:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:20.692018Z", + "start_time": "2025-12-13T13:46:20.688366Z" + } + }, + "outputs": [], + "source": [ + "piecewise_efficiency = fx.PiecewiseConversion(\n", + " {\n", + " 'Fuel': fx.Piecewise(\n", + " [\n", + " fx.Piece(start=78, end=132), # Part load\n", + " fx.Piece(start=132, end=179), # Mid load\n", + " fx.Piece(start=179, end=250), # Full load\n", + " ]\n", + " ),\n", + " 'Elec': fx.Piecewise(\n", + " [\n", + " fx.Piece(start=25, end=50), # 32% -> 38% efficiency\n", + " fx.Piece(start=50, end=75), # 38% -> 42% efficiency\n", + " fx.Piece(start=75, end=100), # 42% -> 40% efficiency\n", + " ]\n", + " ),\n", + " }\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "## Build and Solve" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "6", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:21.272711Z", + "start_time": "2025-12-13T13:46:20.704350Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running HiGHS 1.12.0 (git hash: 755a8e0): Copyright (c) 2025 HiGHS under MIT licence terms\n", + "MIP linopy-problem-0haogvbp has 298 rows; 394 cols; 1070 nonzeros; 72 integer variables (72 binary)\n", + "Coefficient ranges:\n", + " Matrix [5e-02, 2e+02]\n", + " Cost [1e+00, 1e+00]\n", + " Bound [1e+00, 3e+02]\n", + " RHS [1e+00, 1e+00]\n", + "Presolving model\n", + "168 rows, 240 cols, 672 nonzeros 0s\n", + "119 rows, 214 cols, 428 nonzeros 0s\n", + "97 rows, 60 cols, 115 nonzeros 0s\n", + "6 rows, 10 cols, 20 nonzeros 0s\n", + "Presolve reductions: rows 6(-292); columns 10(-384); nonzeros 20(-1050) \n", + "\n", 
+ "Solving MIP model with:\n", + " 6 rows\n", + " 10 cols (2 binary, 0 integer, 0 implied int., 8 continuous, 0 domain fixed)\n", + " 20 nonzeros\n", + "\n", + "Src: B => Branching; C => Central rounding; F => Feasibility pump; H => Heuristic;\n", + " I => Shifting; J => Feasibility jump; L => Sub-MIP; P => Empty MIP; R => Randomized rounding;\n", + " S => Solve LP; T => Evaluate node; U => Unbounded; X => User solution; Y => HiGHS solution;\n", + " Z => ZI Round; l => Trivial lower; p => Trivial point; u => Trivial upper; z => Trivial zero\n", + "\n", + " Nodes | B&B Tree | Objective Bounds | Dynamic Constraints | Work \n", + "Src Proc. InQueue | Leaves Expl. | BestBound BestSol Gap | Cuts InLp Confl. | LpIters Time\n", + "\n", + " J 0 0 0 100.00% -inf 182.9596783 Large 0 0 0 0 0.0s\n", + " 1 0 1 100.00% 182.9596783 182.9596783 0.00% 0 0 0 0 0.0s\n", + "\n", + "Solving report\n", + " Model linopy-problem-0haogvbp\n", + " Status Optimal\n", + " Primal bound 182.959678343\n", + " Dual bound 182.959678343\n", + " Gap 0% (tolerance: 1%)\n", + " P-D integral 0\n", + " Solution status feasible\n", + " 182.959678343 (objective)\n", + " 0 (bound viol.)\n", + " 0 (int. 
viol.)\n", + " 0 (row viol.)\n", + " Timing 0.01\n", + " Max sub-MIP depth 0\n", + " Nodes 1\n", + " Repair LPs 0\n", + " LP iterations 0\n" + ] + }, + { + "data": { + "text/plain": [ + "FlowSystem\n", + "==========\n", + "Timesteps: 24 (Hour) [2024-01-22 to 2024-01-22]\n", + "Periods: None\n", + "Scenarios: None\n", + "Status: ✓\n", + "\n", + "Components (3 items)\n", + "--------------------\n", + " * GasEngine\n", + " * GasGrid\n", + " * Load\n", + "\n", + "Buses (2 items)\n", + "---------------\n", + " * Electricity\n", + " * Gas\n", + "\n", + "Effects (2 items)\n", + "-----------------\n", + " * costs\n", + " * Penalty\n", + "\n", + "Flows (4 items)\n", + "---------------\n", + " * GasEngine(Elec)\n", + " * GasEngine(Fuel)\n", + " * GasGrid(Gas)\n", + " * Load(Elec)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "timesteps = pd.date_range('2024-01-22', periods=24, freq='h')\n", + "\n", + "# Demand varies through the day (30-90 kW, within piecewise range 25-100)\n", + "elec_demand = 60 + 30 * np.sin(np.arange(24) * np.pi / 12)\n", + "\n", + "fs = fx.FlowSystem(timesteps)\n", + "fs.add_elements(\n", + " fx.Bus('Gas'),\n", + " fx.Bus('Electricity'),\n", + " fx.Effect('costs', '€', is_standard=True, is_objective=True),\n", + " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n", + " fx.LinearConverter(\n", + " 'GasEngine',\n", + " inputs=[fx.Flow('Fuel', bus='Gas')],\n", + " outputs=[fx.Flow('Elec', bus='Electricity')],\n", + " piecewise_conversion=piecewise_efficiency,\n", + " ),\n", + " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n", + ")\n", + "\n", + "fs.optimize(fx.solvers.HighsSolver())" + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": [ + "## Visualize the Efficiency Curve" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "8", + 
"metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:21.384359Z", + "start_time": "2025-12-13T13:46:21.288290Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + " \n", + " \n", + " " + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Fuel')" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "## Results" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "10", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:22.068940Z", + "start_time": "2025-12-13T13:46:21.920317Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 600B\n", + "Dimensions: (time: 25)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 200B 2024-01-22 ... 2024-01-23\n", + "Data variables:\n", + " GasEngine(Elec) (time) float64 200B -60.0 -67.76 -75.0 ... -45.0 -52.24 nan\n", + " Load(Elec) (time) float64 200B 60.0 67.76 75.0 ... 45.0 52.24 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=GasEngine(Elec)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasEngine(Elec)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasEngine(Elec)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-22T00:00:00.000000000', '2024-01-22T01:00:00.000000000',\n", + " '2024-01-22T02:00:00.000000000', '2024-01-22T03:00:00.000000000',\n", + " '2024-01-22T04:00:00.000000000', '2024-01-22T05:00:00.000000000',\n", + " '2024-01-22T06:00:00.000000000', '2024-01-22T07:00:00.000000000',\n", + " '2024-01-22T08:00:00.000000000', '2024-01-22T09:00:00.000000000',\n", + " '2024-01-22T10:00:00.000000000', '2024-01-22T11:00:00.000000000',\n", + " '2024-01-22T12:00:00.000000000', '2024-01-22T13:00:00.000000000',\n", + " '2024-01-22T14:00:00.000000000', '2024-01-22T15:00:00.000000000',\n", + " '2024-01-22T16:00:00.000000000', '2024-01-22T17:00:00.000000000',\n", + " '2024-01-22T18:00:00.000000000', '2024-01-22T19:00:00.000000000',\n", + " '2024-01-22T20:00:00.000000000', '2024-01-22T21:00:00.000000000',\n", + " '2024-01-22T22:00:00.000000000', '2024-01-22T23:00:00.000000000',\n", + " '2024-01-23T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAATsDFOq+87vBQwAAAAAAAwF' ... '///39GwHOKoYYiHkrAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Load(Elec)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Load(Elec)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Load(Elec)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-22T00:00:00.000000000', '2024-01-22T01:00:00.000000000',\n", + " '2024-01-22T02:00:00.000000000', '2024-01-22T03:00:00.000000000',\n", + " '2024-01-22T04:00:00.000000000', '2024-01-22T05:00:00.000000000',\n", + " '2024-01-22T06:00:00.000000000', '2024-01-22T07:00:00.000000000',\n", + " '2024-01-22T08:00:00.000000000', '2024-01-22T09:00:00.000000000',\n", + " '2024-01-22T10:00:00.000000000', '2024-01-22T11:00:00.000000000',\n", + " '2024-01-22T12:00:00.000000000', '2024-01-22T13:00:00.000000000',\n", + " '2024-01-22T14:00:00.000000000', '2024-01-22T15:00:00.000000000',\n", + " '2024-01-22T16:00:00.000000000', '2024-01-22T17:00:00.000000000',\n", + " '2024-01-22T18:00:00.000000000', '2024-01-22T19:00:00.000000000',\n", + " '2024-01-22T20:00:00.000000000', '2024-01-22T21:00:00.000000000',\n", + " '2024-01-22T22:00:00.000000000', '2024-01-22T23:00:00.000000000',\n", + " '2024-01-23T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAATkDFOq+87vBQQAAAAAAAwF' ... 
'///39GQHOKoYYiHkpAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Electricity (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fs.statistics.plot.balance('Electricity')" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "11", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T13:46:22.102836Z", + "start_time": "2025-12-13T13:46:22.085158Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Efficiency range: 33.8% - 41.9%\n", + "Total cost: 182.96 €\n" + ] + } + ], + "source": [ + "# Verify efficiency varies with load\n", + "fuel = fs.solution['GasEngine(Fuel)|flow_rate']\n", + "elec = fs.solution['GasEngine(Elec)|flow_rate']\n", + "efficiency = elec / fuel\n", + "\n", + "print(f'Efficiency range: {float(efficiency.min()):.1%} - {float(efficiency.max()):.1%}')\n", + "print(f'Total cost: {fs.solution[\"costs\"].item():.2f} €')" + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "## Key Points\n", + "\n", + "**Syntax:**\n", + "```python\n", + "fx.PiecewiseConversion({\n", + " 'Input': fx.Piecewise([fx.Piece(start=a, end=b), ...]),\n", + " 'Output': fx.Piecewise([fx.Piece(start=x, end=y), ...]),\n", + "})\n", + "```\n", + "\n", + "**Rules:**\n", + "- All flows must have the **same number of segments**\n", + "- Segments typically **connect** (end of N = start of N+1)\n", + "- Efficiency = output / input at each point\n", + "\n", + "**Time-varying:** Pass arrays instead of scalars to model 
changing limits (e.g., temperature derating).\n", + "\n", + "**Next:** See [06c-piecewise-effects](06c-piecewise-effects.ipynb) for non-linear investment costs." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/06c-piecewise-effects.ipynb b/docs/notebooks/06c-piecewise-effects.ipynb new file mode 100644 index 000000000..8f44b9cf2 --- /dev/null +++ b/docs/notebooks/06c-piecewise-effects.ipynb @@ -0,0 +1,4544 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Piecewise Effects\n", + "\n", + "Model **non-linear investment costs** with economies of scale and discrete size tiers.\n", + "\n", + "This notebook demonstrates:\n", + "- **PiecewiseEffects**: Non-linear cost functions for investments\n", + "- **Gaps between pieces**: Representing discrete size tiers (unavailable sizes)\n", + "- How the optimizer selects from available size options" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "1", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:05.842524Z", + "start_time": "2025-12-13T09:37:01.302972Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "flixopt.config.CONFIG" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## The Problem: Discrete Size Tiers\n", + "\n", + "Real equipment often comes in **discrete sizes** with gaps between options:\n", + "\n", + "| Tier | Size Range | Cost per kWh | Notes |\n", + "|------|------------|--------------|-------|\n", + "| Small | 50-100 kWh | 
0.20 €/kWh | Residential units |\n", + "| *Gap* | 100-200 kWh | *unavailable* | No products in this range |\n", + "| Medium | 200-400 kWh | 0.12 €/kWh | Commercial units |\n", + "| *Gap* | 400-500 kWh | *unavailable* | No products in this range |\n", + "| Large | 500-800 kWh | 0.06 €/kWh | Industrial units |\n", + "\n", + "The gaps represent size ranges where no products are available from manufacturers." + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Define the Cost Curve with Gaps\n", + "\n", + "Each piece defines a size tier. Gaps between pieces are **forbidden** zones." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:05.891430Z", + "start_time": "2025-12-13T09:37:05.883541Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Available size tiers:\n", + " Small: 50-100 kWh at 0.20 €/kWh\n", + " Medium: 200-400 kWh at 0.12 €/kWh\n", + " Large: 500-800 kWh at 0.06 €/kWh\n" + ] + } + ], + "source": [ + "# Piecewise costs with gaps between tiers\n", + "# Cost values are CUMULATIVE at each breakpoint\n", + "piecewise_costs = fx.PiecewiseEffects(\n", + " piecewise_origin=fx.Piecewise(\n", + " [\n", + " fx.Piece(start=50, end=100), # Small tier: 50-100 kWh\n", + " fx.Piece(start=200, end=400), # Medium tier: 200-400 kWh (gap: 100-200)\n", + " fx.Piece(start=500, end=800), # Large tier: 500-800 kWh (gap: 400-500)\n", + " ]\n", + " ),\n", + " piecewise_shares={\n", + " 'costs': fx.Piecewise(\n", + " [\n", + " fx.Piece(start=10, end=20), # 50kWh=10€, 100kWh=20€ → 0.20 €/kWh\n", + " fx.Piece(start=24, end=48), # 200kWh=24€, 400kWh=48€ → 0.12 €/kWh\n", + " fx.Piece(start=30, end=48), # 500kWh=30€, 800kWh=48€ → 0.06 €/kWh\n", + " ]\n", + " )\n", + " },\n", + ")\n", + "\n", + "print('Available size tiers:')\n", + "print(' Small: 50-100 kWh at 0.20 €/kWh')\n", + "print(' Medium: 200-400 kWh at 0.12 
€/kWh')\n", + "print(' Large: 500-800 kWh at 0.06 €/kWh')" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "8", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:05.919885Z", + "start_time": "2025-12-13T09:37:05.915254Z" + } + }, + "outputs": [], + "source": [ + "timesteps = pd.date_range('2024-01-01', periods=24, freq='h')\n", + "\n", + "# Electricity price: cheap at night, expensive during day\n", + "elec_price = np.array(\n", + " [\n", + " 0.05,\n", + " 0.05,\n", + " 0.05,\n", + " 0.05,\n", + " 0.05,\n", + " 0.05, # 00-06: night (cheap)\n", + " 0.15,\n", + " 0.20,\n", + " 0.25,\n", + " 0.25,\n", + " 0.20,\n", + " 0.15, # 06-12: morning\n", + " 0.15,\n", + " 0.20,\n", + " 0.25,\n", + " 0.30,\n", + " 0.30,\n", + " 0.25, # 12-18: afternoon (expensive)\n", + " 0.20,\n", + " 0.15,\n", + " 0.10,\n", + " 0.08,\n", + " 0.06,\n", + " 0.05, # 18-24: evening\n", + " ]\n", + ")\n", + "\n", + "demand = np.full(24, 100) # 100 kW constant demand" + ] + }, + { + "cell_type": "markdown", + "id": "120b3beb025756ef", + "metadata": {}, + "source": [ + "## Simple Arbitrage Scenario\n", + "\n", + "A battery arbitrages between cheap night and expensive day electricity." 
+ ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "## Build and Solve the Model" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "10", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:07.048599Z", + "start_time": "2025-12-13T09:37:05.935256Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running HiGHS 1.12.0 (git hash: 755a8e0): Copyright (c) 2025 HiGHS under MIT licence terms\n", + "MIP linopy-problem-wrb0ote0 has 437 rows; 294 cols; 1098 nonzeros; 54 integer variables (54 binary)\n", + "Coefficient ranges:\n", + " Matrix [1e-05, 8e+02]\n", + " Cost [1e+00, 1e+00]\n", + " Bound [1e+00, 8e+02]\n", + " RHS [1e+00, 1e+00]\n", + "Presolving model\n", + "253 rows, 159 cols, 588 nonzeros 0s\n", + "152 rows, 107 cols, 504 nonzeros 0s\n", + "151 rows, 107 cols, 500 nonzeros 0s\n", + "Presolve reductions: rows 151(-286); columns 107(-187); nonzeros 500(-598) \n", + "\n", + "Solving MIP model with:\n", + " 151 rows\n", + " 107 cols (52 binary, 0 integer, 0 implied int., 55 continuous, 0 domain fixed)\n", + " 500 nonzeros\n", + "\n", + "Src: B => Branching; C => Central rounding; F => Feasibility pump; H => Heuristic;\n", + " I => Shifting; J => Feasibility jump; L => Sub-MIP; P => Empty MIP; R => Randomized rounding;\n", + " S => Solve LP; T => Evaluate node; U => Unbounded; X => User solution; Y => HiGHS solution;\n", + " Z => ZI Round; l => Trivial lower; p => Trivial point; u => Trivial upper; z => Trivial zero\n", + "\n", + " Nodes | B&B Tree | Objective Bounds | Dynamic Constraints | Work \n", + "Src Proc. InQueue | Leaves Expl. | BestBound BestSol Gap | Cuts InLp Confl. 
| LpIters Time\n", + "\n", + " J 0 0 0 0.00% -inf 359 Large 0 0 0 0 0.0s\n", + " 0 0 0 0.00% 248.9944598 359 30.64% 0 0 0 62 0.0s\n", + " L 0 0 0 0.00% 248.9944598 248.9944598 0.00% 32 11 0 73 0.0s\n", + " 1 0 1 100.00% 248.9944598 248.9944598 0.00% 32 11 0 82 0.0s\n", + "\n", + "Solving report\n", + " Model linopy-problem-wrb0ote0\n", + " Status Optimal\n", + " Primal bound 248.994459834\n", + " Dual bound 248.994459834\n", + " Gap 0% (tolerance: 1%)\n", + " P-D integral 0.00660979209716\n", + " Solution status feasible\n", + " 248.994459834 (objective)\n", + " 0 (bound viol.)\n", + " 6.43929354283e-15 (int. viol.)\n", + " 0 (row viol.)\n", + " Timing 0.03\n", + " Max sub-MIP depth 1\n", + " Nodes 1\n", + " Repair LPs 0\n", + " LP iterations 82\n", + " 0 (strong br.)\n", + " 11 (separation)\n", + " 9 (heuristics)\n" + ] + }, + { + "data": { + "text/plain": [ + "FlowSystem\n", + "==========\n", + "Timesteps: 24 (Hour) [2024-01-01 to 2024-01-01]\n", + "Periods: None\n", + "Scenarios: None\n", + "Status: ✓\n", + "\n", + "Components (3 items)\n", + "--------------------\n", + " * Battery\n", + " * Demand\n", + " * Grid\n", + "\n", + "Buses (1 item)\n", + "--------------\n", + " * Elec\n", + "\n", + "Effects (2 items)\n", + "-----------------\n", + " * costs\n", + " * Penalty\n", + "\n", + "Flows (4 items)\n", + "---------------\n", + " * Battery(charge)\n", + " * Battery(discharge)\n", + " * Demand(Elec)\n", + " * Grid(Elec)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fs = fx.FlowSystem(timesteps)\n", + "\n", + "fs.add_elements(\n", + " fx.Bus('Elec'),\n", + " fx.Effect('costs', '€', is_standard=True, is_objective=True),\n", + " # Grid with time-varying price\n", + " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n", + " # Battery with PIECEWISE investment cost (discrete tiers)\n", + " fx.Storage(\n", + " 'Battery',\n", + " 
charging=fx.Flow('charge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n", + " discharging=fx.Flow('discharge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n", + " capacity_in_flow_hours=fx.InvestParameters(\n", + " piecewise_effects_of_investment=piecewise_costs,\n", + " minimum_size=0,\n", + " maximum_size=800,\n", + " ),\n", + " eta_charge=0.95,\n", + " eta_discharge=0.95,\n", + " initial_charge_state=0,\n", + " ),\n", + " fx.Sink('Demand', inputs=[fx.Flow('Elec', bus='Elec', size=1, fixed_relative_profile=demand)]),\n", + ")\n", + "\n", + "fs.optimize(fx.solvers.HighsSolver())" + ] + }, + { + "cell_type": "markdown", + "id": "be5dc58de4a3c809", + "metadata": {}, + "source": [ + "## Visualize the Cost Curve\n", + "\n", + "The\n", + "plot\n", + "shows\n", + "the\n", + "three\n", + "discrete\n", + "tiers\n", + "with gaps between them." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "c734d019ece6c6fe", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:07.301104Z", + "start_time": "2025-12-13T09:37:07.136275Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + " \n", + " \n", + " " + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ] + }, + "jetTransient": { + "display_id": null + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "piecewise_costs.plot(title='Battery Investment Cost (Discrete Tiers)')" + ] + }, + { + "cell_type": "markdown", + "id": "39b4ec726d6d43c1", + "metadata": {}, + "source": "## Results: Which Tier Was Selected?" + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "12", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:08.189381Z", + "start_time": "2025-12-13T09:37:08.142348Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Selected tier: Large (500-800 kWh)\n", + "Battery size: 800 kWh\n", + "Total cost: 249.0 €\n" + ] + } + ], + "source": [ + "battery_size = fs.solution['Battery|size'].item()\n", + "total_cost = fs.solution['costs'].item()\n", + "\n", + "# Determine which tier was selected\n", + "if battery_size < 1:\n", + " tier = 'None'\n", + "elif battery_size <= 100:\n", + " tier = 'Small (50-100 kWh)'\n", + "elif battery_size <= 400:\n", + " tier = 'Medium (200-400 kWh)'\n", + "else:\n", + " tier = 'Large (500-800 kWh)'\n", + "\n", + "print(f'Selected tier: {tier}')\n", + "print(f'Battery size: {battery_size:.0f} kWh')\n", + "print(f'Total cost: {total_cost:.1f} €')" + ] + }, + { + "cell_type": "markdown", + "id": "13", + "metadata": {}, + "source": [ + "## Storage Operation" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "14", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T09:37:08.407306Z", + "start_time": "2025-12-13T09:37:08.263634Z" + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "
" + ], + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (time: 25)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 200B 2024-01-01 ... 2024-01-02\n", + "Data variables:\n", + " Grid(Elec) (time) float64 200B -100.0 -100.0 -100.0 ... -100.0 nan\n", + " Battery(discharge) (time) float64 200B -0.0 -7.267e-14 ... 1.243e-13 nan\n", + " Battery(charge) (time) float64 200B 0.0 4.425e-14 ... -1.385e-13 nan\n", + " Demand(Elec) (time) float64 200B 100.0 100.0 100.0 ... 100.0 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Grid(Elec)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Grid(Elec)',\n", + " 'marker': {'color': '#636EFA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Grid(Elec)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAWcD+//////9YwPD//////1' ... '////9YwP///////1jAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Battery(discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Battery(discharge)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Battery(discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIDs2puCeHQ0vfWsvI9KlT' ... 'zLt3xBPcy3fMu3fEE9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Battery(charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Battery(charge)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Battery(charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAADZtTcF8egoPT0r76NS5V' ... 'zLt3xDvcy3fMu3fEO9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Demand(Elec)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Demand(Elec)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Demand(Elec)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n", + " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n", + " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n", + " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n", + " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n", + " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n", + " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n", + " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n", + " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n", + " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n", + " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n", + " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n", + " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAWUAAAAAAAABZQAAAAAAAAF' ... 
'AAAABZQAAAAAAAAFlAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Elec (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fs.statistics.plot.balance('Elec')" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "## Best Practice: PiecewiseEffects with Gaps\n", + "\n", + "```python\n", + "fx.PiecewiseEffects(\n", + " piecewise_origin=fx.Piecewise([\n", + " fx.Piece(start=50, end=100), # Tier 1\n", + " fx.Piece(start=200, end=400), # Tier 2 (gap: 100-200 forbidden)\n", + " ]),\n", + " piecewise_shares={\n", + " 'costs': fx.Piecewise([\n", + " fx.Piece(start=10, end=20), # Cumulative cost at tier 1 boundaries\n", + " fx.Piece(start=24, end=48), # Cumulative cost at tier 2 boundaries\n", + " ])\n", + " },\n", + ")\n", + "```\n", + "\n", + "**Key points:**\n", + "- Gaps between pieces = forbidden size ranges\n", + "- Cost values are **cumulative** at each boundary\n", + "- Use when equipment comes in discrete tiers" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": "## Previous: Piecewise Conversion\n\nSee **[06b-piecewise-conversion](06b-piecewise-conversion.ipynb)** for modeling minimum load constraints with `PiecewiseConversion` + `StatusParameters`." 
+ } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/07-scenarios-and-periods.ipynb b/docs/notebooks/07-scenarios-and-periods.ipynb index c8294ba7e..1b3b761d8 100644 --- a/docs/notebooks/07-scenarios-and-periods.ipynb +++ b/docs/notebooks/07-scenarios-and-periods.ipynb @@ -198,16 +198,7 @@ "id": "12", "metadata": {}, "outputs": [], - "source": [ - "flow_system = fx.FlowSystem(\n", - " timesteps=timesteps,\n", - " periods=periods,\n", - " scenarios=scenarios,\n", - " scenario_weights=scenario_weights,\n", - ")\n", - "\n", - "print(flow_system)" - ] + "source": "flow_system = fx.FlowSystem(\n timesteps=timesteps,\n periods=periods,\n scenarios=scenarios,\n scenario_weights=scenario_weights,\n)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\n\nprint(flow_system)" }, { "cell_type": "markdown", diff --git a/docs/notebooks/08-large-scale-optimization.ipynb b/docs/notebooks/08-large-scale-optimization.ipynb index 3dcbb4bb9..61cae35c5 100644 --- a/docs/notebooks/08-large-scale-optimization.ipynb +++ b/docs/notebooks/08-large-scale-optimization.ipynb @@ -133,92 +133,7 @@ "id": "8", "metadata": {}, "outputs": [], - "source": [ - "def build_system(timesteps, heat_demand, elec_price, gas_price):\n", - " \"\"\"Build a FlowSystem with investment optimization.\"\"\"\n", - " fs = fx.FlowSystem(timesteps)\n", - "\n", - " fs.add_elements(\n", - " # Buses\n", - " fx.Bus('Electricity', carrier='electricity'),\n", - " fx.Bus(\n", - " 'Heat', carrier='heat', imbalance_penalty_per_flow_hour=1e5\n", - " ), # Allow for imbalance to prevent infeasibilities with fixed sizes\n", - " fx.Bus('Gas', carrier='gas'),\n", - " # Effects\n", - " fx.Effect('costs', '€', 'Total 
Costs', is_standard=True, is_objective=True),\n", - " # Gas Supply\n", - " fx.Source(\n", - " 'GasGrid',\n", - " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=gas_price)],\n", - " ),\n", - " # CHP with investment optimization\n", - " fx.linear_converters.CHP(\n", - " 'CHP',\n", - " electrical_efficiency=0.38,\n", - " thermal_efficiency=0.47,\n", - " electrical_flow=fx.Flow(\n", - " 'P_el',\n", - " bus='Electricity',\n", - " size=fx.InvestParameters(\n", - " minimum_size=0,\n", - " maximum_size=150,\n", - " effects_of_investment_per_size={'costs': 25},\n", - " ),\n", - " relative_minimum=0.4,\n", - " ),\n", - " thermal_flow=fx.Flow('Q_th', bus='Heat'),\n", - " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n", - " ),\n", - " # Gas Boiler with investment optimization\n", - " fx.linear_converters.Boiler(\n", - " 'Boiler',\n", - " thermal_efficiency=0.92,\n", - " thermal_flow=fx.Flow(\n", - " 'Q_th',\n", - " bus='Heat',\n", - " size=fx.InvestParameters(\n", - " minimum_size=0,\n", - " maximum_size=400,\n", - " effects_of_investment_per_size={'costs': 8},\n", - " ),\n", - " ),\n", - " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n", - " ),\n", - " # Thermal Storage with investment optimization\n", - " fx.Storage(\n", - " 'Storage',\n", - " capacity_in_flow_hours=fx.InvestParameters(\n", - " minimum_size=0,\n", - " maximum_size=1000,\n", - " effects_of_investment_per_size={'costs': 0.5}, # Cheap storage\n", - " ),\n", - " initial_charge_state=0,\n", - " eta_charge=0.98,\n", - " eta_discharge=0.98,\n", - " relative_loss_per_hour=0.005, # Low losses\n", - " charging=fx.Flow('Charge', bus='Heat', size=150),\n", - " discharging=fx.Flow('Discharge', bus='Heat', size=150),\n", - " ),\n", - " # Electricity Sales\n", - " fx.Sink(\n", - " 'ElecSales',\n", - " inputs=[fx.Flow('P_el', bus='Electricity', size=200, effects_per_flow_hour=-elec_price)],\n", - " ),\n", - " # Heat Demand\n", - " fx.Sink(\n", - " 'HeatDemand',\n", - " inputs=[fx.Flow('Q_th', bus='Heat', size=1, 
fixed_relative_profile=heat_demand)],\n", - " ),\n", - " )\n", - "\n", - " return fs\n", - "\n", - "\n", - "# Build the base system\n", - "flow_system = build_system(timesteps, heat_demand, elec_price, gas_price)\n", - "print(f'Base system: {len(timesteps)} timesteps')" - ] + "source": "def build_system(timesteps, heat_demand, elec_price, gas_price):\n \"\"\"Build a FlowSystem with investment optimization.\"\"\"\n fs = fx.FlowSystem(timesteps)\n fs.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n )\n fs.add_elements(\n # Buses\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus(\n 'Heat', carrier='heat', imbalance_penalty_per_flow_hour=1e5\n ), # Allow for imbalance to prevent infeasibilities with fixed sizes\n fx.Bus('Gas', carrier='gas'),\n # Effects\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n # Gas Supply\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=gas_price)],\n ),\n # CHP with investment optimization\n fx.linear_converters.CHP(\n 'CHP',\n electrical_efficiency=0.38,\n thermal_efficiency=0.47,\n electrical_flow=fx.Flow(\n 'P_el',\n bus='Electricity',\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=150,\n effects_of_investment_per_size={'costs': 25},\n ),\n relative_minimum=0.4,\n ),\n thermal_flow=fx.Flow('Q_th', bus='Heat'),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n # Gas Boiler with investment optimization\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow(\n 'Q_th',\n bus='Heat',\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=400,\n effects_of_investment_per_size={'costs': 8},\n ),\n ),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n # Thermal Storage with investment optimization\n fx.Storage(\n 'Storage',\n capacity_in_flow_hours=fx.InvestParameters(\n minimum_size=0,\n maximum_size=1000,\n 
effects_of_investment_per_size={'costs': 0.5}, # Cheap storage\n ),\n initial_charge_state=0,\n eta_charge=0.98,\n eta_discharge=0.98,\n relative_loss_per_hour=0.005, # Low losses\n charging=fx.Flow('Charge', bus='Heat', size=150),\n discharging=fx.Flow('Discharge', bus='Heat', size=150),\n ),\n # Electricity Sales\n fx.Sink(\n 'ElecSales',\n inputs=[fx.Flow('P_el', bus='Electricity', size=200, effects_per_flow_hour=-elec_price)],\n ),\n # Heat Demand\n fx.Sink(\n 'HeatDemand',\n inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n ),\n )\n\n return fs\n\n\n# Build the base system\nflow_system = build_system(timesteps, heat_demand, elec_price, gas_price)\nprint(f'Base system: {len(timesteps)} timesteps')" }, { "cell_type": "markdown", diff --git a/docs/notebooks/09-plotting-and-data-access.ipynb b/docs/notebooks/09-plotting-and-data-access.ipynb index cadcc240a..091d5f1d4 100644 --- a/docs/notebooks/09-plotting-and-data-access.ipynb +++ b/docs/notebooks/09-plotting-and-data-access.ipynb @@ -16,14 +16,20 @@ }, { "cell_type": "code", - "execution_count": 1, "id": "2", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:29.505282Z", - "start_time": "2025-12-12T12:06:26.542476Z" + "end_time": "2025-12-13T14:13:06.543191Z", + "start_time": "2025-12-13T14:13:00.434024Z" } }, + "source": [ + "from pathlib import Path\n", + "\n", + "import flixopt as fx\n", + "\n", + "fx.CONFIG.notebook()" + ], "outputs": [ { "data": { @@ -36,13 +42,7 @@ "output_type": "execute_result" } ], - "source": [ - "from pathlib import Path\n", - "\n", - "import flixopt as fx\n", - "\n", - "fx.CONFIG.notebook()" - ] + "execution_count": 1 }, { "cell_type": "markdown", @@ -56,14 +56,17 @@ }, { "cell_type": "code", - "execution_count": 2, "id": "4", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.136859Z", - "start_time": "2025-12-12T12:06:29.554928Z" + "end_time": "2025-12-13T14:13:14.318678Z", + "start_time": "2025-12-13T14:13:06.637107Z" 
} }, + "source": [ + "# Run the generation script (only needed once, or to regenerate)\n", + "!python data/generate_example_systems.py" + ], "outputs": [ { "name": "stdout", @@ -71,27 +74,26 @@ "text": [ "Creating simple_system...\r\n", " Optimizing...\r\n", - " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/simple_system.nc4...\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/simple_system.nc4...\r\n", " Done. Objective: 558.83\r\n", "\r\n", "Creating complex_system...\r\n", " Optimizing...\r\n", - " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/complex_system.nc4...\r\n", - " Done. Objective: 220.25\r\n", + "HighsMipSolverData::transformNewIntegerFeasibleSolution tmpSolver.run();\r\n", + "HighsMipSolverData::transformNewIntegerFeasibleSolution tmpSolver.run();\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/complex_system.nc4...\r\n", + " Done. Objective: 302.36\r\n", "\r\n", "Creating multiperiod_system...\r\n", " Optimizing...\r\n", - " Saving to /Users/felix/PycharmProjects/flixopt_182303/docs/notebooks/data/multiperiod_system.nc4...\r\n", - " Done. Objective: 644.93\r\n", + " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/multiperiod_system.nc4...\r\n", + " Done. 
Objective: 19472.48\r\n", "\r\n", "All systems generated successfully!\r\n" ] } ], - "source": [ - "# Run the generation script (only needed once, or to regenerate)\n", - "!python data/generate_example_systems.py" - ] + "execution_count": 2 }, { "cell_type": "markdown", @@ -105,26 +107,13 @@ }, { "cell_type": "code", - "execution_count": 3, "id": "6", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.466083Z", - "start_time": "2025-12-12T12:06:35.210813Z" + "end_time": "2025-12-13T14:13:14.940793Z", + "start_time": "2025-12-13T14:13:14.377412Z" } }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loaded systems:\n", - " simple: 4 components, 2 buses\n", - " complex_sys: 9 components, 3 buses\n", - " multiperiod: 4 components, dims={'scenario': 2, 'period': 3, 'time': 49}\n" - ] - } - ], "source": [ "DATA_DIR = Path('data')\n", "\n", @@ -137,7 +126,20 @@ "print(f' simple: {len(simple.components)} components, {len(simple.buses)} buses')\n", "print(f' complex_sys: {len(complex_sys.components)} components, {len(complex_sys.buses)} buses')\n", "print(f' multiperiod: {len(multiperiod.components)} components, dims={dict(multiperiod.solution.sizes)}')" - ] + ], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded systems:\n", + " simple: 4 components, 2 buses\n", + " complex_sys: 9 components, 3 buses\n", + " multiperiod: 4 components, dims={'scenario': 2, 'period': 3, 'time': 49}\n" + ] + } + ], + "execution_count": 3 }, { "cell_type": "markdown", @@ -147,202 +149,34 @@ }, { "cell_type": "code", - "execution_count": null, "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# Balance plot for the Heat bus - shows all inflows and outflows\n", - "simple.statistics.plot.balance('Heat')" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.534937Z", - "start_time": "2025-12-12T12:06:35.496736Z" - } - }, - "source": 
"### Accessing Plot Data\n\nEvery plot returns a `PlotResult` with both the figure and underlying data. Use `.data.to_dataframe()` to get a pandas DataFrame:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Get plot result and access the underlying data\n", - "result = simple.statistics.plot.balance('Heat', show=False)\n", - "\n", - "# Convert to DataFrame for easy viewing/export\n", - "df = result.data.to_dataframe()\n", - "df.head(10)" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.617665Z", - "start_time": "2025-12-12T12:06:35.585811Z" - } - }, - "source": "### Energy Totals\n\nGet total energy by flow using `flow_hours`:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": "import pandas as pd\n\n# Total energy per flow\ntotals = {var: float(simple.statistics.flow_hours[var].sum()) for var in simple.statistics.flow_hours.data_vars}\n\npd.Series(totals, name='Energy [kWh]').to_frame().T" - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.754890Z", - "start_time": "2025-12-12T12:06:35.735084Z" - } - }, - "source": "## 3. 
Time Series Plots" - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": "### 3.1 Balance Plot\n\nShows inflows (positive) and outflows (negative) for a bus or component:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:35.874652Z", - "start_time": "2025-12-12T12:06:35.844281Z" - } - }, - "outputs": [], - "source": [ - "# Component balance (all flows of a component)\n", - "simple.statistics.plot.balance('ThermalStorage')" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": "### 3.2 Carrier Balance\n\nShows all flows of a specific carrier across the entire system:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:36.112518Z", - "start_time": "2025-12-12T12:06:36.004885Z" - } - }, - "outputs": [], - "source": [ - "complex_sys.statistics.plot.carrier_balance('heat')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "complex_sys.statistics.plot.carrier_balance('electricity')" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:36.266666Z", - "start_time": "2025-12-12T12:06:36.198686Z" - } - }, - "source": "### 3.3 Flow Rates\n\nPlot multiple flow rates together:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# All flows\n", - "simple.statistics.plot.flows()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:36.455687Z", - "start_time": "2025-12-12T12:06:36.450204Z" + "end_time": "2025-12-13T14:13:15.234587Z", + "start_time": "2025-12-13T14:13:14.950674Z" } }, - "outputs": [], - "source": [ - "# Flows filtered by 
component\n", - "simple.statistics.plot.flows(component='Boiler')" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, "source": [ - "### 3.4 Storage Plot\n", - "\n", - "Combined view of storage charge state and flows:" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "33", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:41.765686Z", - "start_time": "2025-12-12T12:06:41.441569Z" - } - }, + "# Balance plot for the Heat bus - shows all inflows and outflows\n", + "simple.statistics.plot.balance('Heat')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 5kB\n", + "PlotResult(data= Size: 7kB\n", "Dimensions: (time: 169)\n", "Coordinates:\n", " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", "Data variables:\n", - " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n", " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", - " charge_state (time) float64 1kB 250.0 248.8 ... 102.5 200.0, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", - " 'legendgroup': 'ThermalStorage(Charge)',\n", - " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'ThermalStorage(Charge)',\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", " 'textposition': 'auto',\n", @@ -433,12 +267,12 @@ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", " 'legendgroup': 'ThermalStorage(Discharge)',\n", - " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", " 'name': 'ThermalStorage(Discharge)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", @@ -533,15 +367,14 @@ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", - " {'hovertemplate': 'time=%{x}
value=%{y}',\n", - " 'legendgroup': '',\n", - " 'line': {'color': 'black', 'width': 2},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'charge_state',\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", @@ -628,73 +461,17 @@ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", - " 'yaxis': 'y2'}],\n", - " 'layout': {'bargap': 0,\n", - " 'bargroupgap': 0,\n", - " 'barmode': 'relative',\n", - " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'ThermalStorage Operation (flow_rate)'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}},\n", - " 'yaxis2': {'overlaying': 'y', 'showgrid': False, 'side': 'right', 'title': {'text': 'Charge State'}}}\n", - "}))" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "simple.statistics.plot.storage('ThermalStorage')" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "### 3.5 Charge States Plot\n", - "\n", - "Plot charge state time series directly:" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "35", - "metadata": { - "ExecuteTime": { - "end_time": 
"2025-12-12T12:06:42.032694Z", - "start_time": "2025-12-12T12:06:41.807633Z" - } - }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 3kB\n", - "Dimensions: (time: 169)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n", - "Data variables:\n", - " ThermalStorage (time) float64 1kB 250.0 248.8 247.5 ... 103.0 102.5 200.0, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=ThermalStorage
time=%{x}
value=%{y}',\n", - " 'legendgroup': 'ThermalStorage',\n", - " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'ThermalStorage',\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Office(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'Office(Heat)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", @@ -781,5199 +558,4317 @@ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'}],\n", - " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", " 'template': '...',\n", - " 'title': {'text': 'Storage Charge States'},\n", + " 'title': {'text': 'Heat (flow_rate)'},\n", " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'Charge State'}}}\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 18, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "simple.statistics.plot.charge_states('ThermalStorage')" - ] - }, - { - "cell_type": "markdown", - "id": "36", - "metadata": {}, - "source": [ - "## 4. Aggregated Plots" - ] + "execution_count": 4 }, { "cell_type": "markdown", - "id": "37", - "metadata": {}, - "source": [ - "### 4.1 Sizes Plot\n", - "\n", - "Bar chart of component/flow sizes:" - ] + "id": "9", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.534937Z", + "start_time": "2025-12-12T12:06:35.496736Z" + } + }, + "source": "### Accessing Plot Data\n\nEvery plot returns a `PlotResult` with both the figure and underlying data. Use `.data.to_dataframe()` to get a pandas DataFrame:" }, { "cell_type": "code", - "execution_count": 19, - "id": "38", + "id": "10", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:42.195142Z", - "start_time": "2025-12-12T12:06:42.126462Z" + "end_time": "2025-12-13T14:13:15.732085Z", + "start_time": "2025-12-13T14:13:15.577916Z" } }, + "source": [ + "# Get plot result and access the underlying data\n", + "result = simple.statistics.plot.balance('Heat', show=False)\n", + "\n", + "# Convert to DataFrame for easy viewing/export\n", + "df = result.data.to_dataframe()\n", + "df.head(10)" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 0B\n", - "Dimensions: ()\n", - "Data variables:\n", - " *empty*, figure=Figure({\n", - " 'data': [], 'layout': {'template': '...'}\n", - "}))" + " Boiler(Heat) ThermalStorage(Discharge) \\\n", + "time \n", + "2024-01-15 00:00:00 -32.483571 -0.000000e+00 \n", + "2024-01-15 01:00:00 -29.308678 5.275242e-13 \n", + "2024-01-15 02:00:00 -33.238443 -7.086767e-13 \n", + "2024-01-15 03:00:00 -101.411593 -3.516828e-13 \n", + "2024-01-15 04:00:00 -128.829233 -5.613288e-13 \n", + "2024-01-15 05:00:00 -128.829315 -7.033655e-13 \n", + "2024-01-15 06:00:00 -0.000000 -3.789606e+01 \n", + "2024-01-15 07:00:00 -0.000000 -8.383717e+01 \n", + "2024-01-15 08:00:00 -0.000000 -7.765263e+01 \n", + "2024-01-15 09:00:00 -0.000000 -8.271280e+01 \n", + "\n", + " ThermalStorage(Charge) Office(Heat) \n", + "time \n", + "2024-01-15 00:00:00 0.000000e+00 32.483571 \n", + "2024-01-15 01:00:00 -3.747575e-13 29.308678 \n", + "2024-01-15 02:00:00 8.792069e-13 33.238443 \n", + "2024-01-15 03:00:00 6.379644e+01 37.615149 \n", + "2024-01-15 04:00:00 1.000000e+02 28.829233 \n", + "2024-01-15 05:00:00 1.000000e+02 28.829315 \n", + "2024-01-15 06:00:00 1.055048e-12 37.896064 \n", + "2024-01-15 07:00:00 7.033655e-13 83.837174 \n", + "2024-01-15 08:00:00 -7.673862e-13 77.652628 \n", + "2024-01-15 09:00:00 7.033655e-13 82.712800 " + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Boiler(Heat)ThermalStorage(Discharge)ThermalStorage(Charge)Office(Heat)
time
2024-01-15 00:00:00-32.483571-0.000000e+000.000000e+0032.483571
2024-01-15 01:00:00-29.3086785.275242e-13-3.747575e-1329.308678
2024-01-15 02:00:00-33.238443-7.086767e-138.792069e-1333.238443
2024-01-15 03:00:00-101.411593-3.516828e-136.379644e+0137.615149
2024-01-15 04:00:00-128.829233-5.613288e-131.000000e+0228.829233
2024-01-15 05:00:00-128.829315-7.033655e-131.000000e+0228.829315
2024-01-15 06:00:00-0.000000-3.789606e+011.055048e-1237.896064
2024-01-15 07:00:00-0.000000-8.383717e+017.033655e-1383.837174
2024-01-15 08:00:00-0.000000-7.765263e+01-7.673862e-1377.652628
2024-01-15 09:00:00-0.000000-8.271280e+017.033655e-1382.712800
\n", + "
" ] }, - "execution_count": 19, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "simple.statistics.plot.sizes()" - ] + "execution_count": 5 }, { "cell_type": "markdown", - "id": "39", - "metadata": {}, - "source": [ - "### 4.2 Effects Plot\n", - "\n", - "Bar chart of effect totals by component:" - ] + "id": "11", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-12T12:06:35.617665Z", + "start_time": "2025-12-12T12:06:35.585811Z" + } + }, + "source": "### Energy Totals\n\nGet total energy by flow using `flow_hours`:" }, { "cell_type": "code", - "execution_count": 20, - "id": "40", + "id": "12", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:42.497806Z", - "start_time": "2025-12-12T12:06:42.283099Z" + "end_time": "2025-12-13T14:13:15.948455Z", + "start_time": "2025-12-13T14:13:15.924150Z" } }, + "source": "import pandas as pd\n\n# Total energy per flow\ntotals = {var: float(simple.statistics.flow_hours[var].sum()) for var in simple.statistics.flow_hours.data_vars}\n\npd.Series(totals, name='Energy [kWh]').to_frame().T", "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 24B\n", - "Dimensions: (effect: 1, component: 1)\n", - "Coordinates:\n", - " * effect (effect) object 8B 'costs'\n", - " * component (component) object 8B 'GasGrid'\n", - "Data variables:\n", - " total (effect, component) float64 8B 558.8, figure=Figure({\n", - " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'GasGrid',\n", - " 'marker': {'color': '#a4fc3b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'GasGrid',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['GasGrid'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': 'sDkY5qR2gUA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'bargap': 0,\n", - " 'bargroupgap': 0,\n", - " 'barmode': 'relative',\n", - " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'costs (total) by component'},\n", - " 'xaxis': {'anchor': 'y',\n", - " 'categoryarray': [GasGrid],\n", - " 'categoryorder': 'array',\n", - " 'domain': [0.0, 1.0],\n", - " 'title': {'text': 'component'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" + " GasGrid(Gas) Boiler(Gas) Boiler(Heat) ThermalStorage(Charge) \\\n", + "Energy [kWh] 8936.665406 8936.665406 8221.732173 3457.182735 \n", + "\n", + " ThermalStorage(Discharge) Office(Heat) \n", + "Energy [kWh] 3242.788948 8007.338386 " + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
GasGrid(Gas)Boiler(Gas)Boiler(Heat)ThermalStorage(Charge)ThermalStorage(Discharge)Office(Heat)
Energy [kWh]8936.6654068936.6654068221.7321733457.1827353242.7889488007.338386
\n", + "
" ] }, - "execution_count": 20, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "simple.statistics.plot.effects(effect='costs')" - ] + "execution_count": 6 }, { - "cell_type": "code", - "execution_count": 21, - "id": "41", + "cell_type": "markdown", + "id": "13", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:43.064579Z", - "start_time": "2025-12-12T12:06:42.560263Z" + "end_time": "2025-12-12T12:06:35.754890Z", + "start_time": "2025-12-12T12:06:35.735084Z" } }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 72B\n", - "Dimensions: (effect: 1, component: 4)\n", - "Coordinates:\n", - " * effect (effect) object 8B 'costs'\n", - " * component (component) object 32B 'CHP' 'ElectricityExport' ... 'GasGrid'\n", - "Data variables:\n", - " total (effect, component) float64 32B 78.0 -386.3 118.1 410.4, figure=Figure({\n", - " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'CHP',\n", - " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'CHP',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['CHP'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': 'AAAAAACAU0A=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'ElectricityExport',\n", - " 'marker': {'color': '#21e2b5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'ElectricityExport',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['ElectricityExport'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': '3ObHwIskeMA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'ElectricityImport',\n", - " 'marker': {'color': '#f7b836', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'ElectricityImport',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['ElectricityImport'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': 'A0vkOKSHXUA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'GasGrid',\n", - " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'GasGrid',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['GasGrid'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': 'AUZx5ZymeUA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'bargap': 0,\n", - " 'bargroupgap': 0,\n", - " 'barmode': 'relative',\n", - " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'costs (total) by component'},\n", - " 'xaxis': {'anchor': 'y',\n", - " 'categoryarray': [CHP, ElectricityExport,\n", - " ElectricityImport, GasGrid],\n", - " 'categoryorder': 'array',\n", - " 'domain': [0.0, 1.0],\n", - " 'title': {'text': 'component'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Multi-effect system: compare costs and CO2\n", - "complex_sys.statistics.plot.effects(effect='costs')" - ] + "source": "## 3. Time Series Plots" + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": "### 3.1 Balance Plot\n\nShows inflows (positive) and outflows (negative) for a bus or component:" }, { "cell_type": "code", - "execution_count": 22, - "id": "42", + "id": "15", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:43.867944Z", - "start_time": "2025-12-12T12:06:43.136118Z" + "end_time": "2025-12-13T14:13:16.412850Z", + "start_time": "2025-12-13T14:13:16.305115Z" } }, + "source": [ + "# Component balance (all flows of a component)\n", + "simple.statistics.plot.balance('ThermalStorage')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 72B\n", - "Dimensions: (effect: 1, component: 4)\n", + "PlotResult(data= Size: 4kB\n", + "Dimensions: (time: 169)\n", "Coordinates:\n", - " * effect (effect) object 8B 'CO2'\n", - " * component (component) object 32B 'CHP' 'ElectricityExport' ... 'GasGrid'\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", "Data variables:\n", - " total (effect, component) float64 32B 0.0 0.0 295.3 1.368e+03, figure=Figure({\n", - " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'CHP',\n", - " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'CHP',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['CHP'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'ElectricityExport',\n", - " 'marker': {'color': '#21e2b5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'ElectricityExport',\n", + " ThermalStorage(Charge) (time) float64 1kB -0.0 3.748e-13 ... -100.0 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB 0.0 -5.275e-13 ... nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", " 'textposition': 'auto',\n", " 'type': 'bar',\n", - " 'x': array(['ElectricityExport'], dtype=object),\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " 
'2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " 
'2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " 
'2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'y': {'bdata': ('AAAAAAAAAIAUfPDBB19aPby8nSEx72' ... 'AAAAAAgNj//////1jAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'ElectricityImport',\n", - " 'marker': {'color': '#f7b836', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'ElectricityImport',\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", " 'textposition': 'auto',\n", " 'type': 'bar',\n", - " 'x': array(['ElectricityImport'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': {'bdata': '4q6Oo8Z0ckA=', 'dtype': 'f8'},\n", - " 'yaxis': 'y'},\n", - " {'hovertemplate': 'component=%{x}
value=%{y}',\n", - " 'legendgroup': 'GasGrid',\n", - " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", - " 'name': 'GasGrid',\n", - " 'orientation': 'v',\n", - " 'showlegend': True,\n", - " 'textposition': 'auto',\n", - " 'type': 'bar',\n", - " 'x': array(['GasGrid'], dtype=object),\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " 
'2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " 
'2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " 
'2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': 'AmXeaS1glUA=', 'dtype': 'f8'},\n", + " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49ivby8nSEx72' ... 'AAAAAgPWP9SoFav2i9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", " 'yaxis': 'y'}],\n", " 'layout': {'bargap': 0,\n", " 'bargroupgap': 0,\n", " 'barmode': 'relative',\n", - " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", " 'template': '...',\n", - " 'title': {'text': 'CO2 (total) by component'},\n", - " 'xaxis': {'anchor': 'y',\n", - " 'categoryarray': [CHP, ElectricityExport,\n", - " ElectricityImport, GasGrid],\n", - " 'categoryorder': 'array',\n", - " 'domain': [0.0, 1.0],\n", - " 'title': {'text': 'component'}},\n", + " 'title': {'text': 'ThermalStorage (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 22, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "complex_sys.statistics.plot.effects(effect='CO2')" - ] + "execution_count": 7 }, { "cell_type": "markdown", - "id": "43", + "id": "16", "metadata": {}, - "source": [ - "### 4.3 Duration Curve\n", - "\n", - "Shows how often each power level is reached:" - ] + "source": "### 3.2 Carrier Balance\n\nShows all flows of a specific carrier across the entire system:" }, { "cell_type": "code", - "execution_count": 23, - "id": "44", + "id": "17", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:44.614581Z", - "start_time": "2025-12-12T12:06:44.248704Z" + "end_time": "2025-12-13T14:13:16.630015Z", + "start_time": "2025-12-13T14:13:16.539450Z" } }, + "source": [ + "complex_sys.statistics.plot.carrier_balance('heat')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 3kB\n", - "Dimensions: (duration: 169)\n", + "PlotResult(data= Size: 4kB\n", + "Dimensions: (time: 73)\n", "Coordinates:\n", - " * duration (duration) int64 1kB 0 1 2 3 4 5 6 ... 163 164 165 166 167 168\n", + " * time (time) datetime64[ns] 584B 2024-06-01 ... 2024-06-04\n", "Data variables:\n", - " Boiler(Heat) (duration) float64 1kB nan 137.8 134.1 133.1 ... 0.0 0.0 0.0, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'Boiler(Heat)',\n", - " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'Boiler(Heat)',\n", + " CHP(Heat) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n", + " HeatPump(Heat) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n", + " BackupBoiler(Heat) (time) float64 584B 20.0 26.01 25.43 ... 20.0 nan\n", + " HeatStorage(Discharge) (time) float64 584B 0.0 0.0 0.0 ... 0.0 nan\n", + " HeatStorage(Charge) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 nan\n", + " HeatDemand(Heat) (time) float64 584B -20.0 -26.01 ... -20.0 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=CHP(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP(Heat)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP(Heat)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAABAAIAAwAEAAUABgAHAAgACQAKAA' ... '4AnwCgAKEAogCjAKQApQCmAKcAqAA='),\n", - " 'dtype': 'i2'},\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " 
'2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////39oQtzNVzphQLt+ZyCBw2' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatPump(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump(Heat)',\n", + " 'marker': {'color': '#FFA15A', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatPump(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=BackupBoiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'BackupBoiler(Heat)',\n", + " 'marker': {'color': '#19D3F3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'BackupBoiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAANEBcQRe1SgI6QOU9Gisjbz' ... 'Dnhlw6QAAAAAAAADRAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatStorage(Discharge)',\n", + " 'marker': {'color': '#FF6692', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'RO7MQ7PQAAAAAAAAAAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatStorage(Charge)',\n", + " 'marker': {'color': '#FF6692', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'RO7MQ+vQAAAAAAAACAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatDemand(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatDemand(Heat)',\n", + " 'marker': {'color': '#B6E880', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatDemand(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAANMBcQRe1SgI6wOQ9Gisjbz' ... 'Dnhlw6wAAAAAAAADTAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'}],\n", - " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", " 'template': '...',\n", - " 'title': {'text': 'Duration Curve'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", + " 'title': {'text': 'Heat Balance (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 23, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "simple.statistics.plot.duration_curve('Boiler(Heat)')" - ] + "execution_count": 8 }, { "cell_type": "code", - "execution_count": 24, - "id": "45", + "id": "18", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:46.830321Z", - "start_time": "2025-12-12T12:06:46.454534Z" + "end_time": "2025-12-13T14:13:16.765682Z", + "start_time": "2025-12-13T14:13:16.660109Z" } }, + "source": [ + "complex_sys.statistics.plot.carrier_balance('electricity')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 2kB\n", - "Dimensions: (duration: 73)\n", + "PlotResult(data= Size: 4kB\n", + "Dimensions: (time: 73)\n", "Coordinates:\n", - " * duration (duration) int64 584B 0 1 2 3 4 5 ... 67 68 69 70 71 72\n", + " * time (time) datetime64[ns] 584B 2024-06-01 ... 2024-06-04\n", "Data variables:\n", - " CHP(Heat) (duration) float64 584B nan 85.0 85.0 ... 0.0 0.0 0.0\n", - " HeatPump(Heat) (duration) float64 584B nan 40.0 40.0 ... 0.0 0.0 0.0\n", - " BackupBoiler(Heat) (duration) float64 584B nan 0.0 0.0 0.0 ... 0.0 0.0 0.0, figure=Figure({\n", - " 'data': [{'hovertemplate': 'variable=CHP(Heat)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'CHP(Heat)',\n", - " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'CHP(Heat)',\n", + " ElectricityImport(El) (time) float64 584B 23.49 20.59 21.13 ... 17.12 nan\n", + " CHP(El) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n", + " ElectricityExport(El) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 -0.0 nan\n", + " HeatPump(El) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 -0.0 nan\n", + " ElDemand(El) (time) float64 584B -23.49 -20.59 ... -17.12 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ElectricityImport(El)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityImport(El)',\n", + " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityImport(El)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", - " 'dtype': 'i1'},\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', 
'2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////38AAAAAAEBVQAAAAAAAQF' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'y': {'bdata': ('2HsanZJ8N0B/T9mTNpc0QB5Tg3x1IT' ... 'ANSU0wQAE5VciyHTFAAAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", - " {'hovertemplate': 'variable=HeatPump(Heat)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'HeatPump(Heat)',\n", - " 'line': {'color': '#EF553B', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'HeatPump(Heat)',\n", + " {'hovertemplate': 'variable=CHP(El)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP(El)',\n", + " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP(El)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", - " 'dtype': 'i1'},\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " 
'2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////38AAAAAAABEQAAAAAAAAE' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", - " {'hovertemplate': 'variable=BackupBoiler(Heat)
duration=%{x}
value=%{y}',\n", - " 'legendgroup': 'BackupBoiler(Heat)',\n", - " 'line': {'color': '#00CC96', 'dash': 'solid'},\n", - " 'marker': {'symbol': 'circle'},\n", - " 'mode': 'lines',\n", - " 'name': 'BackupBoiler(Heat)',\n", + " {'hovertemplate': 'variable=ElectricityExport(El)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityExport(El)',\n", + " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityExport(El)',\n", " 'orientation': 'v',\n", " 'showlegend': True,\n", - " 'type': 'scatter',\n", - " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", - " 'dtype': 'i1'},\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', 
'2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('/////////38AAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 
'AAAAAAgAAAAAAAAACAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", - " 'yaxis': 'y'}],\n", - " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", - " 'template': '...',\n", - " 'title': {'text': 'Duration Curve'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", - " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", - "}))" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Multiple variables\n", - "complex_sys.statistics.plot.duration_curve(['CHP(Heat)', 'HeatPump(Heat)', 'BackupBoiler(Heat)'])" - ] - }, - { - "cell_type": "markdown", - "id": "46", - "metadata": {}, - "source": [ - "## 5. Heatmaps\n", - "\n", - "Heatmaps reshape time series into 2D grids (e.g., hour-of-day vs day):" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "47", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:47.605052Z", - "start_time": "2025-12-12T12:06:47.328779Z" - } - }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 2kB\n", - "Dimensions: (timeframe: 8, timestep: 24)\n", - "Coordinates:\n", - " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", - " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", - "Data variables:\n", - " value (timestep, timeframe) float64 2kB 32.48 42.84 47.28 ... 124.5 nan, figure=Figure({\n", - " 'data': [{'coloraxis': 'coloraxis',\n", - " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Boiler(Heat)|flow_rate: %{z}',\n", - " 'name': '0',\n", - " 'type': 'heatmap',\n", - " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", - " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatPump(El)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump(El)',\n", + " 'marker': {'color': '#FFA15A', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatPump(El)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", - " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", - " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", - " dtype=object),\n", - " 'yaxis': 'y',\n", - " 'z': {'bdata': ('5ZuWpeU9QED8nmEA1mtFQOR8bxYopE' ... 
'//////M0D1ufhH+R5fQAAAAAAAAPh/'),\n", - " 'dtype': 'f8',\n", - " 'shape': '24, 8'}}],\n", - " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Boiler(Heat)|flow_rate'}},\n", - " 'colorscale': [[0.0, '#30123b'],\n", - " [0.07142857142857142, '#4145ab'],\n", - " [0.14285714285714285, '#4675ed'],\n", - " [0.21428571428571427, '#39a2fc'],\n", - " [0.2857142857142857, '#1bcfd4'],\n", - " [0.35714285714285715, '#24eca6'],\n", - " [0.42857142857142855, '#61fc6c'], [0.5,\n", - " '#a4fc3b'], [0.5714285714285714,\n", - " '#d1e834'], [0.6428571428571429,\n", - " '#f3c63a'], [0.7142857142857143,\n", - " '#fe9b2d'], [0.7857142857142857,\n", - " '#f36315'], [0.8571428571428571,\n", - " '#d93806'], [0.9285714285714286,\n", - " '#b11901'], [1.0, '#7a0402']]},\n", - " 'margin': {'t': 60},\n", + " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'AAAAAAgAAAAAAAAACAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ElDemand(El)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElDemand(El)',\n", + " 'marker': {'color': '#FF97FF', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElDemand(El)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n", + " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n", + " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n", + " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n", + " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n", + " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n", + " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n", + " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n", + " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n", + " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n", + " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n", + " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n", + " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n", + " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n", + " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n", + " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n", + " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n", + " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n", + " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n", + " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n", + " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n", + " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n", + " '2024-06-02T20:00:00.000000000', 
'2024-06-02T21:00:00.000000000',\n", + " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n", + " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n", + " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n", + " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n", + " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n", + " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n", + " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n", + " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n", + " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n", + " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n", + " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n", + " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n", + " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n", + " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('2HsanZJ8N8B/T9mTNpc0wB5Tg3x1IT' ... 'ANSU0wwAE5VciyHTHAAAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", " 'template': '...',\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", - " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + " 'title': {'text': 'Electricity Balance (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 25, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Auto-reshape based on data frequency\n", - "simple.statistics.plot.heatmap('Boiler(Heat)')" - ] + "execution_count": 9 }, { - "cell_type": "code", - "execution_count": 26, - "id": "48", + "cell_type": "markdown", + "id": "19", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:48.600387Z", - "start_time": "2025-12-12T12:06:47.811215Z" + "end_time": "2025-12-12T12:06:36.266666Z", + "start_time": "2025-12-12T12:06:36.198686Z" } }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 2kB\n", - "Dimensions: (timeframe: 8, timestep: 24)\n", - "Coordinates:\n", - " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", - " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", - "Data variables:\n", - " value (timestep, timeframe) float64 2kB 250.0 1.379e-14 ... 102.5 nan, figure=Figure({\n", - " 'data': [{'coloraxis': 'coloraxis',\n", - " 'hovertemplate': ('timeframe: %{x}
timestep: %' ... 'rge_state: %{z}'),\n", - " 'name': '0',\n", - " 'type': 'heatmap',\n", - " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", - " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", - " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", - " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", - " dtype=object),\n", - " 'yaxis': 'y',\n", - " 'z': {'bdata': ('AAAAAABAb0DkBdNVug0PPZGJ+Pa5Lj' ... 'AAAAAAAADw5ELUzaBZQAAAAAAAAPh/'),\n", - " 'dtype': 'f8',\n", - " 'shape': '24, 8'}}],\n", - " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'ThermalStorage|charge_state'}},\n", - " 'colorscale': [[0.0, '#30123b'],\n", - " [0.07142857142857142, '#4145ab'],\n", - " [0.14285714285714285, '#4675ed'],\n", - " [0.21428571428571427, '#39a2fc'],\n", - " [0.2857142857142857, '#1bcfd4'],\n", - " [0.35714285714285715, '#24eca6'],\n", - " [0.42857142857142855, '#61fc6c'], [0.5,\n", - " '#a4fc3b'], [0.5714285714285714,\n", - " '#d1e834'], [0.6428571428571429,\n", - " '#f3c63a'], [0.7142857142857143,\n", - " '#fe9b2d'], [0.7857142857142857,\n", - " '#f36315'], [0.8571428571428571,\n", - " '#d93806'], [0.9285714285714286,\n", - " '#b11901'], [1.0, '#7a0402']]},\n", - " 'margin': {'t': 60},\n", - " 'template': '...',\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", - " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", - "}))" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Storage charge state heatmap\n", - "simple.statistics.plot.heatmap('ThermalStorage')" - ] + "source": "### 3.3 Flow Rates\n\nPlot multiple flow rates together:" }, { "cell_type": "code", - "execution_count": 27, - "id": "49", + 
"id": "20", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:49.215856Z", - "start_time": "2025-12-12T12:06:48.901232Z" + "end_time": "2025-12-13T14:13:16.863735Z", + "start_time": "2025-12-13T14:13:16.783096Z" } }, + "source": [ + "# All flows\n", + "simple.statistics.plot.flows()" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 2kB\n", - "Dimensions: (timeframe: 8, timestep: 24)\n", + "PlotResult(data= Size: 9kB\n", + "Dimensions: (time: 169)\n", "Coordinates:\n", - " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", - " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", "Data variables:\n", - " value (timestep, timeframe) float64 2kB 32.48 27.28 31.72 ... 24.48 nan, figure=Figure({\n", - " 'data': [{'coloraxis': 'coloraxis',\n", - " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Office(Heat)|flow_rate: %{z}',\n", - " 'name': '0',\n", - " 'type': 'heatmap',\n", - " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", - " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", - " 'xaxis': 'x',\n", - " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", - " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", - " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", - " dtype=object),\n", - " 'yaxis': 'y',\n", - " 'z': {'bdata': ('5ZuWpeU9QEDqSDirMEc7QB8FVNfUtz' ... 'AAAAAANECu5+If5Xs4QAAAAAAAAPh/'),\n", - " 'dtype': 'f8',\n", - " 'shape': '24, 8'}}],\n", - " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Office(Heat)|flow_rate'}},\n", - " 'colorscale': [[0.0, 'rgb(247,251,255)'], [0.125,\n", - " 'rgb(222,235,247)'], [0.25,\n", - " 'rgb(198,219,239)'], [0.375,\n", - " 'rgb(158,202,225)'], [0.5,\n", - " 'rgb(107,174,214)'], [0.625,\n", - " 'rgb(66,146,198)'], [0.75,\n", - " 'rgb(33,113,181)'], [0.875,\n", - " 'rgb(8,81,156)'], [1.0,\n", - " 'rgb(8,48,107)']]},\n", - " 'template': '...',\n", - " 'title': {'text': 'Heat Demand Pattern'},\n", - " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", - " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", - "}))" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Custom colorscale\n", - "simple.statistics.plot.heatmap('Office(Heat)', color_continuous_scale='Blues', title='Heat Demand Pattern')" - ] - }, - { - "cell_type": "markdown", - "id": "50", - "metadata": {}, - "source": [ - "## 6. Sankey Diagrams\n", - "\n", - "Sankey diagrams visualize energy flows through the system." 
- ] - }, - { - "cell_type": "markdown", - "id": "51", - "metadata": {}, - "source": [ - "### 6.1 Flow Sankey\n", - "\n", - "Total energy flows:" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "id": "52", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:49.583991Z", - "start_time": "2025-12-12T12:06:49.299561Z" - } - }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (link: 6)\n", - "Coordinates:\n", - " * link (link) int64 48B 0 1 2 3 4 5\n", - " source (link) \n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 3kB\n", - "Dimensions: (link: 12)\n", - "Coordinates:\n", - " * link (link) int64 96B 0 1 2 3 4 5 6 7 8 9 10 11\n", - " source (link) \n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 0B\n", - "Dimensions: (link: 0)\n", - "Coordinates:\n", - " * link (link) float64 0B \n", - " source (link) float64 0B \n", - " target (link) float64 0B \n", - " label (link) float64 0B \n", - " carrier (link) float64 0B \n", - "Data variables:\n", - " value (link) float64 0B , figure=Figure({\n", - " 'data': [{'link': {'label': [], 'source': [], 'target': [], 'value': []},\n", - " 'node': {'color': [], 'label': [], 'line': {'color': 'black', 'width': 0.5}, 'pad': 15, 'thickness': 20},\n", - " 'type': 'sankey'}],\n", - " 'layout': {'template': '...', 'title': {'text': 'Investment Sizes (Capacities)'}}\n", - "}))" - ] - }, - "execution_count": 49, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "multiperiod.statistics.plot.sankey.sizes()" - ] - }, - { - "cell_type": "markdown", - "id": "56", - "metadata": {}, - "source": [ - "### 6.3 Peak Flow Sankey\n", - "\n", - "Maximum flow rates (peak power):" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "57", - "metadata": { - "ExecuteTime": { - "end_time": "2025-12-12T12:06:51.458035Z", - "start_time": "2025-12-12T12:06:51.237341Z" - } - }, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (link: 6)\n", - "Coordinates:\n", - " * link (link) int64 48B 0 1 2 3 4 5\n", - " source (link) \n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 184B\n", - "Dimensions: (link: 1)\n", - "Coordinates:\n", - " * link (link) int64 8B 0\n", - " source (link) \n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 488B\n", - "Dimensions: (link: 2)\n", - "Coordinates:\n", - " * link (link) int64 16B 0 1\n", - " source (link) \n", - "
" - ], - "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (link: 5)\n", - "Coordinates:\n", - " * link (link) int64 40B 0 1 2 3 4\n", - " source (link) \n", - " window.PlotlyConfig = {MathJaxConfig: 'local'};\n", - " if (window.MathJax && window.MathJax.Hub && window.MathJax.Hub.Config) {window.MathJax.Hub.Config({SVG: {font: \"STIX-Web\"}});}\n", - " \n", - " \n", - " " + " GasGrid(Gas) (time) float64 1kB 35.31 31.86 ... 135.3 nan\n", + " Boiler(Gas) (time) float64 1kB 35.31 31.86 ... 135.3 nan\n", + " Boiler(Heat) (time) float64 1kB 32.48 29.31 ... 124.5 nan\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB 0.0 -5.275e-13 ... nan\n", + " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=GasGrid(Gas)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid(Gas)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'GasGrid(Gas)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Boiler(Gas)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Gas)',\n", + " 'line': {'color': '#EF553B', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Gas)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'line': {'color': '#00CC96', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Heat)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QED3U8WNBU89QHjXQkqFnk' ... '////8zQPW5+Ef5Hl9AAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'line': {'color': '#AB63FA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", 
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + 
" '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'line': {'color': '#FFA15A', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49ivby8nSEx72' ... 'AAAAAgPWP9SoFav2i9AAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Office(Heat)',\n", + " 'line': {'color': '#19D3F3', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Office(Heat)',\n", + " 'showlegend': True,\n", + " 'type': 'scattergl',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Flows (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 10 + }, + { + "cell_type": "code", + "id": "21", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:16.936035Z", + "start_time": "2025-12-13T14:13:16.880022Z" + } + }, + "source": [ + "# Flows filtered by component\n", + "simple.statistics.plot.flows(component='Boiler')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 4kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n", + "Data variables:\n", + " Boiler(Gas) (time) float64 1kB 35.31 31.86 36.13 110.2 ... 21.74 135.3 nan\n", + " Boiler(Heat) (time) float64 1kB 32.48 29.31 33.24 101.4 ... 20.0 124.5 nan, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Gas)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Gas)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Gas)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'line': {'color': '#EF553B', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('5ZuWpeU9QED3U8WNBU89QHjXQkqFnk' ... '////8zQPW5+Ef5Hl9AAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Flows (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 11 + }, + { + "cell_type": "markdown", + "id": "32", + "metadata": {}, + "source": [ + "### 3.4 Storage Plot\n", + "\n", + "Combined view of storage charge state and flows:" + ] + }, + { + "cell_type": "code", + "id": "33", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.166751Z", + "start_time": "2025-12-13T14:13:16.985913Z" + } + }, + "source": [ + "simple.statistics.plot.storage('ThermalStorage')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 5kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n", + "Data variables:\n", + " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n", + " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n", + " charge_state (time) float64 1kB 250.0 248.8 ... 102.5 200.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Charge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Charge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage(Discharge)',\n", + " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage(Discharge)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'time=%{x}
value=%{y}',\n", + " 'legendgroup': '',\n", + " 'line': {'color': 'black', 'width': 2},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'charge_state',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n", + " 
'2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n", + " 
'2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n", + " 
'2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y2'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'ThermalStorage Operation (flow_rate)'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}},\n", + " 'yaxis2': {'overlaying': 'y', 'showgrid': False, 'side': 'right', 'title': {'text': 'Charge State'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 12 + }, + { + "cell_type": "markdown", + "id": "34", + "metadata": {}, + "source": [ + "### 3.5 Charge States Plot\n", + "\n", + "Plot charge state time series directly:" + ] + }, + { + "cell_type": "code", + "id": "35", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.297322Z", + "start_time": "2025-12-13T14:13:17.214857Z" + } + }, + "source": [ + "simple.statistics.plot.charge_states('ThermalStorage')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (time: 169)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n", + "Data variables:\n", + " ThermalStorage (time) float64 1kB 250.0 248.8 247.5 ... 103.0 102.5 200.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=ThermalStorage
time=%{x}
value=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n", + " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n", + " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n", + " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n", + " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n", + " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n", + " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n", + " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n", + " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n", + " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n", + " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n", + " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n", + " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n", + " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n", + " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n", + " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n", + " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n", + " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n", + " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n", + " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n", + " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n", + " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n", + " '2024-01-16T20:00:00.000000000', 
'2024-01-16T21:00:00.000000000',\n", + " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n", + " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n", + " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n", + " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n", + " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n", + " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n", + " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n", + " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n", + " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n", + " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n", + " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n", + " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n", + " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n", + " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n", + " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n", + " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n", + " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n", + " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n", + " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n", + " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n", + " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n", + " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n", + " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n", + " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n", + " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n", + " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n", + " '2024-01-19T02:00:00.000000000', 
'2024-01-19T03:00:00.000000000',\n", + " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n", + " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n", + " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n", + " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n", + " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n", + " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n", + " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n", + " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n", + " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n", + " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n", + " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n", + " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n", + " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n", + " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n", + " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n", + " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n", + " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n", + " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n", + " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n", + " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n", + " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n", + " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n", + " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n", + " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n", + " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n", + " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n", + " '2024-01-21T08:00:00.000000000', 
'2024-01-21T09:00:00.000000000',\n", + " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n", + " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n", + " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n", + " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n", + " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n", + " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n", + " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n", + " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Storage Charge States'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'Charge State'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 13 + }, + { + "cell_type": "markdown", + "id": "36", + "metadata": {}, + "source": [ + "## 4. Aggregated Plots" + ] + }, + { + "cell_type": "markdown", + "id": "37", + "metadata": {}, + "source": [ + "### 4.1 Sizes Plot\n", + "\n", + "Bar chart of component/flow sizes:" + ] + }, + { + "cell_type": "code", + "id": "38", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:17:12.906249Z", + "start_time": "2025-12-13T14:17:12.823893Z" + } + }, + "source": "multiperiod.statistics.plot.sizes()", + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 208B\n", + "Dimensions: (period: 3, scenario: 2)\n", + "Coordinates:\n", + " * period (period) int64 24B 2024 2025 2026\n", + " * scenario (scenario) scenario=high_demand
period=2024
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2025
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2026
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2024
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2025
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2026
Size=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['Boiler(Heat)'], dtype=object),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y3'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2024
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x4',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y4'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2025
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x5',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y5'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2026
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x6',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y6'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2024
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2025
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x2',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y2'},\n", + " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2026
Size=%{y}',\n", + " 'legendgroup': 'ThermalStorage',\n", + " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n", + " 'name': 'ThermalStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': False,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ThermalStorage'], dtype=object),\n", + " 'xaxis': 'x3',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y3'}],\n", + " 'layout': {'annotations': [{'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2024',\n", + " 'x': 0.15666666666666665,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2025',\n", + " 'x': 0.49,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'period=2026',\n", + " 'x': 0.8233333333333333,\n", + " 'xanchor': 'center',\n", + " 'xref': 'paper',\n", + " 'y': 1.0,\n", + " 'yanchor': 'bottom',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'scenario=low_demand',\n", + " 'textangle': 90,\n", + " 'x': 0.98,\n", + " 'xanchor': 'left',\n", + " 'xref': 'paper',\n", + " 'y': 0.2425,\n", + " 'yanchor': 'middle',\n", + " 'yref': 'paper'},\n", + " {'font': {},\n", + " 'showarrow': False,\n", + " 'text': 'scenario=high_demand',\n", + " 'textangle': 90,\n", + " 'x': 0.98,\n", + " 'xanchor': 'left',\n", + " 'xref': 'paper',\n", + " 'y': 0.7575000000000001,\n", + " 'yanchor': 'middle',\n", + " 'yref': 'paper'}],\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'Flow'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Investment Sizes'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [Boiler(Heat), ThermalStorage],\n", + " 'categoryorder': 'array',\n", + " 
'domain': [0.0, 0.3133333333333333],\n", + " 'title': {'text': 'Flow'}},\n", + " 'xaxis2': {'anchor': 'y2',\n", + " 'categoryarray': [Boiler(Heat), ThermalStorage],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.3333333333333333, 0.6466666666666666],\n", + " 'matches': 'x',\n", + " 'title': {'text': 'Flow'}},\n", + " 'xaxis3': {'anchor': 'y3',\n", + " 'categoryarray': [Boiler(Heat), ThermalStorage],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.6666666666666666, 0.98],\n", + " 'matches': 'x',\n", + " 'title': {'text': 'Flow'}},\n", + " 'xaxis4': {'anchor': 'y4', 'domain': [0.0, 0.3133333333333333], 'matches': 'x', 'showticklabels': False},\n", + " 'xaxis5': {'anchor': 'y5',\n", + " 'domain': [0.3333333333333333, 0.6466666666666666],\n", + " 'matches': 'x',\n", + " 'showticklabels': False},\n", + " 'xaxis6': {'anchor': 'y6', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'showticklabels': False},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 0.485], 'title': {'text': 'Size'}},\n", + " 'yaxis2': {'anchor': 'x2', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis3': {'anchor': 'x3', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis4': {'anchor': 'x4', 'domain': [0.515, 1.0], 'matches': 'y', 'title': {'text': 'Size'}},\n", + " 'yaxis5': {'anchor': 'x5', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False},\n", + " 'yaxis6': {'anchor': 'x6', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 46 + }, + { + "cell_type": "markdown", + "id": "39", + "metadata": {}, + "source": [ + "### 4.2 Effects Plot\n", + "\n", + "Bar chart of effect totals by component:" + ] + }, + { + "cell_type": "code", + "id": "40", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.440231Z", + "start_time": "2025-12-13T14:13:17.355184Z" + } + }, + "source": [ + "simple.statistics.plot.effects(effect='costs')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 24B\n", + "Dimensions: (effect: 1, component: 1)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'costs'\n", + " * component (component) object 8B 'GasGrid'\n", + "Data variables:\n", + " total (effect, component) float64 8B 558.8, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#a4fc3b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'sDkY5qR2gUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'costs (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [GasGrid],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 15 + }, + { + "cell_type": "code", + "id": "41", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.547032Z", + "start_time": "2025-12-13T14:13:17.454197Z" + } + }, + "source": [ + "# Multi-effect system: compare costs and CO2\n", + "complex_sys.statistics.plot.effects(effect='costs')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 104B\n", + "Dimensions: (effect: 1, component: 6)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'costs'\n", + " * component (component) object 48B 'CHP' ... 'HeatStorage'\n", + "Data variables:\n", + " total (effect, component) float64 48B 76.0 -297.4 102.9 420.8 0.0 0.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP',\n", + " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['CHP'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAU0A=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityExport',\n", + " 'marker': {'color': '#3c99f9', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityExport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityExport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'QuE7D7GWcsA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityImport',\n", + " 'marker': {'color': '#49f683', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityImport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityImport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'mB7bhVm8WUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#dfda36', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'VVvjiWRNekA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump',\n", + " 'marker': {'color': '#ee5a12', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatPump',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['HeatPump'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatStorage',\n", + " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['HeatStorage'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'costs (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [CHP, ElectricityExport,\n", + " ElectricityImport, GasGrid, HeatPump,\n", + " HeatStorage],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 16 + }, + { + "cell_type": "code", + "id": "42", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.616154Z", + "start_time": "2025-12-13T14:13:17.558702Z" + } + }, + "source": [ + "complex_sys.statistics.plot.effects(effect='CO2')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 104B\n", + "Dimensions: (effect: 1, component: 6)\n", + "Coordinates:\n", + " * effect (effect) object 8B 'CO2'\n", + " * component (component) object 48B 'CHP' ... 'HeatStorage'\n", + "Data variables:\n", + " total (effect, component) float64 48B 0.0 0.0 255.1 1.403e+03 0.0 0.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP',\n", + " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'CHP',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['CHP'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityExport',\n", + " 'marker': {'color': '#3c99f9', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityExport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityExport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'ElectricityImport',\n", + " 'marker': {'color': '#49f683', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'ElectricityImport',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['ElectricityImport'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'PuZR52/jb0A=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'GasGrid',\n", + " 'marker': {'color': '#dfda36', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'GasGrid',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['GasGrid'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'HMySHSnrlUA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump',\n", + " 'marker': {'color': '#ee5a12', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatPump',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['HeatPump'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'component=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatStorage',\n", + " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n", + " 'name': 'HeatStorage',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'textposition': 'auto',\n", + " 'type': 'bar',\n", + " 'x': array(['HeatStorage'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'bargap': 0,\n", + " 'bargroupgap': 0,\n", + " 'barmode': 'relative',\n", + " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'CO2 (total) by component'},\n", + " 'xaxis': {'anchor': 'y',\n", + " 'categoryarray': [CHP, ElectricityExport,\n", + " ElectricityImport, GasGrid, HeatPump,\n", + " HeatStorage],\n", + " 'categoryorder': 'array',\n", + " 'domain': [0.0, 1.0],\n", + " 'title': {'text': 'component'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 17 + }, + { + "cell_type": "markdown", + "id": "43", + "metadata": {}, + "source": [ + "### 4.3 Duration Curve\n", + "\n", + "Shows how often each power level is reached:" + ] + }, + { + "cell_type": "code", + "id": "44", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.659929Z", + "start_time": "2025-12-13T14:13:17.624261Z" + } + }, + "source": [ + "simple.statistics.plot.duration_curve('Boiler(Heat)')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (duration: 169)\n", + "Coordinates:\n", + " * duration (duration) int64 1kB 0 1 2 3 4 5 6 ... 163 164 165 166 167 168\n", + "Data variables:\n", + " Boiler(Heat) (duration) float64 1kB nan 137.8 134.1 133.1 ... 0.0 0.0 0.0, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'Boiler(Heat)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'Boiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAABAAIAAwAEAAUABgAHAAgACQAKAA' ... '4AnwCgAKEAogCjAKQApQCmAKcAqAA='),\n", + " 'dtype': 'i2'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////39oQtzNVzphQLt+ZyCBw2' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Duration Curve'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 18 + }, + { + "cell_type": "code", + "id": "45", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.711351Z", + "start_time": "2025-12-13T14:13:17.670270Z" + } + }, + "source": [ + "# Multiple variables\n", + "complex_sys.statistics.plot.duration_curve(['CHP(Heat)', 'HeatPump(Heat)', 'BackupBoiler(Heat)'])" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (duration: 73)\n", + "Coordinates:\n", + " * duration (duration) int64 584B 0 1 2 3 4 5 ... 67 68 69 70 71 72\n", + "Data variables:\n", + " CHP(Heat) (duration) float64 584B nan 80.88 80.62 ... 0.0 0.0 0.0\n", + " HeatPump(Heat) (duration) float64 584B nan 0.0 0.0 0.0 ... 0.0 0.0 0.0\n", + " BackupBoiler(Heat) (duration) float64 584B nan 63.11 ... -8.993e-15, figure=Figure({\n", + " 'data': [{'hovertemplate': 'variable=CHP(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'CHP(Heat)',\n", + " 'line': {'color': '#636EFA', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'CHP(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////39Gwcq9YjhUQOyIZIeOJ1' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=HeatPump(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'HeatPump(Heat)',\n", + " 'line': {'color': '#EF553B', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'HeatPump(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////38AAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'},\n", + " {'hovertemplate': 'variable=BackupBoiler(Heat)
duration=%{x}
value=%{y}',\n", + " 'legendgroup': 'BackupBoiler(Heat)',\n", + " 'line': {'color': '#00CC96', 'dash': 'solid'},\n", + " 'marker': {'symbol': 'circle'},\n", + " 'mode': 'lines',\n", + " 'name': 'BackupBoiler(Heat)',\n", + " 'orientation': 'v',\n", + " 'showlegend': True,\n", + " 'type': 'scatter',\n", + " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n", + " 'dtype': 'i1'},\n", + " 'xaxis': 'x',\n", + " 'y': {'bdata': ('/////////38h4dgzOo5PQDMD0m1cz0' ... 'AAAACwvAAAAAAAALi8AAAAAABABL0='),\n", + " 'dtype': 'f8'},\n", + " 'yaxis': 'y'}],\n", + " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n", + " 'template': '...',\n", + " 'title': {'text': 'Duration Curve'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n", + " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 19 + }, + { + "cell_type": "markdown", + "id": "46", + "metadata": {}, + "source": [ + "## 5. Heatmaps\n", + "\n", + "Heatmaps reshape time series into 2D grids (e.g., hour-of-day vs day):" + ] + }, + { + "cell_type": "code", + "id": "47", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.799982Z", + "start_time": "2025-12-13T14:13:17.729391Z" + } + }, + "source": [ + "# Auto-reshape based on data frequency\n", + "simple.statistics.plot.heatmap('Boiler(Heat)')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 32.48 42.84 47.28 ... 124.5 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Boiler(Heat)|flow_rate: %{z}',\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('5ZuWpeU9QED8nmEA1mtFQOR8bxYopE' ... '//////M0D1ufhH+R5fQAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Boiler(Heat)|flow_rate'}},\n", + " 'colorscale': [[0.0, '#30123b'],\n", + " [0.07142857142857142, '#4145ab'],\n", + " [0.14285714285714285, '#4675ed'],\n", + " [0.21428571428571427, '#39a2fc'],\n", + " [0.2857142857142857, '#1bcfd4'],\n", + " [0.35714285714285715, '#24eca6'],\n", + " [0.42857142857142855, '#61fc6c'], [0.5,\n", + " '#a4fc3b'], [0.5714285714285714,\n", + " '#d1e834'], [0.6428571428571429,\n", + " '#f3c63a'], [0.7142857142857143,\n", + " '#fe9b2d'], [0.7857142857142857,\n", + " '#f36315'], [0.8571428571428571,\n", + " '#d93806'], [0.9285714285714286,\n", + " '#b11901'], [1.0, '#7a0402']]},\n", + " 'margin': {'t': 60},\n", + " 'template': '...',\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 20 + }, + { + "cell_type": "code", + "id": "48", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.849042Z", + "start_time": "2025-12-13T14:13:17.808302Z" + } + }, + "source": [ + "# Storage charge state heatmap\n", + "simple.statistics.plot.heatmap('ThermalStorage')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 250.0 1.379e-14 ... 102.5 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': ('timeframe: %{x}
timestep: %' ... 'rge_state: %{z}'),\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('AAAAAABAb0DkBdNVug0PPZGJ+Pa5Lj' ... 'AAAAAAAADw5ELUzaBZQAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'ThermalStorage|charge_state'}},\n", + " 'colorscale': [[0.0, '#30123b'],\n", + " [0.07142857142857142, '#4145ab'],\n", + " [0.14285714285714285, '#4675ed'],\n", + " [0.21428571428571427, '#39a2fc'],\n", + " [0.2857142857142857, '#1bcfd4'],\n", + " [0.35714285714285715, '#24eca6'],\n", + " [0.42857142857142855, '#61fc6c'], [0.5,\n", + " '#a4fc3b'], [0.5714285714285714,\n", + " '#d1e834'], [0.6428571428571429,\n", + " '#f3c63a'], [0.7142857142857143,\n", + " '#fe9b2d'], [0.7857142857142857,\n", + " '#f36315'], [0.8571428571428571,\n", + " '#d93806'], [0.9285714285714286,\n", + " '#b11901'], [1.0, '#7a0402']]},\n", + " 'margin': {'t': 60},\n", + " 'template': '...',\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 21 + }, + { + "cell_type": "code", + "id": "49", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.900833Z", + "start_time": "2025-12-13T14:13:17.858196Z" + } + }, + "source": [ + "# Custom colorscale\n", + "simple.statistics.plot.heatmap('Office(Heat)', color_continuous_scale='Blues', title='Heat Demand Pattern')" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 2kB\n", + "Dimensions: (timeframe: 8, timestep: 24)\n", + "Coordinates:\n", + " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n", + " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n", + "Data variables:\n", + " value (timestep, timeframe) float64 2kB 32.48 27.28 31.72 ... 24.48 nan, figure=Figure({\n", + " 'data': [{'coloraxis': 'coloraxis',\n", + " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Office(Heat)|flow_rate: %{z}',\n", + " 'name': '0',\n", + " 'type': 'heatmap',\n", + " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n", + " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n", + " 'xaxis': 'x',\n", + " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n", + " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n", + " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n", + " dtype=object),\n", + " 'yaxis': 'y',\n", + " 'z': {'bdata': ('5ZuWpeU9QEDqSDirMEc7QB8FVNfUtz' ... 'AAAAAANECu5+If5Xs4QAAAAAAAAPh/'),\n", + " 'dtype': 'f8',\n", + " 'shape': '24, 8'}}],\n", + " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Office(Heat)|flow_rate'}},\n", + " 'colorscale': [[0.0, 'rgb(247,251,255)'], [0.125,\n", + " 'rgb(222,235,247)'], [0.25,\n", + " 'rgb(198,219,239)'], [0.375,\n", + " 'rgb(158,202,225)'], [0.5,\n", + " 'rgb(107,174,214)'], [0.625,\n", + " 'rgb(66,146,198)'], [0.75,\n", + " 'rgb(33,113,181)'], [0.875,\n", + " 'rgb(8,81,156)'], [1.0,\n", + " 'rgb(8,48,107)']]},\n", + " 'template': '...',\n", + " 'title': {'text': 'Heat Demand Pattern'},\n", + " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n", + " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n", + "}))" + ], + "text/html": [ + "
\n", + "
" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 22 + }, + { + "cell_type": "markdown", + "id": "50", + "metadata": {}, + "source": [ + "## 6. Sankey Diagrams\n", + "\n", + "Sankey diagrams visualize energy flows through the system." + ] + }, + { + "cell_type": "markdown", + "id": "51", + "metadata": {}, + "source": [ + "### 6.1 Flow Sankey\n", + "\n", + "Total energy flows:" + ] + }, + { + "cell_type": "code", + "id": "52", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.930662Z", + "start_time": "2025-12-13T14:13:17.908846Z" + } + }, + "source": [ + "simple.statistics.plot.sankey.flows()" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 6)\n", + "Coordinates:\n", + " * link (link) int64 48B 0 1 2 3 4 5\n", + " source (link) \n", + "
" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 23 + }, + { + "cell_type": "code", + "id": "53", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.970954Z", + "start_time": "2025-12-13T14:13:17.939809Z" + } + }, + "source": [ + "# Complex system with multiple carriers\n", + "complex_sys.statistics.plot.sankey.flows()" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (link: 10)\n", + "Coordinates:\n", + " * link (link) int64 80B 0 1 2 3 4 5 6 7 8 9\n", + " source (link) \n", + "
" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 24 + }, + { + "cell_type": "markdown", + "id": "54", + "metadata": {}, + "source": [ + "### 6.2 Sizes Sankey\n", + "\n", + "Capacity/size allocation:" + ] + }, + { + "cell_type": "code", + "id": "55", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:17.993818Z", + "start_time": "2025-12-13T14:13:17.977340Z" + } + }, + "source": [ + "multiperiod.statistics.plot.sankey.sizes()" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 120B\n", + "Dimensions: (link: 1)\n", + "Coordinates:\n", + " * link (link) int64 8B 0\n", + " source (link) \n", + "
" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 25 + }, + { + "cell_type": "markdown", + "id": "56", + "metadata": {}, + "source": [ + "### 6.3 Peak Flow Sankey\n", + "\n", + "Maximum flow rates (peak power):" + ] + }, + { + "cell_type": "code", + "id": "57", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:18.029364Z", + "start_time": "2025-12-13T14:13:18.001651Z" + } + }, + "source": [ + "simple.statistics.plot.sankey.peak_flow()" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 6)\n", + "Coordinates:\n", + " * link (link) int64 48B 0 1 2 3 4 5\n", + " source (link) \n", + "
" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 26 + }, + { + "cell_type": "markdown", + "id": "58", + "metadata": {}, + "source": [ + "### 6.4 Effects Sankey\n", + "\n", + "Cost/emission allocation:" + ] + }, + { + "cell_type": "code", + "id": "59", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:18.051137Z", + "start_time": "2025-12-13T14:13:18.037718Z" + } + }, + "source": [ + "simple.statistics.plot.sankey.effects(select={'effect': 'costs'})" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 184B\n", + "Dimensions: (link: 1)\n", + "Coordinates:\n", + " * link (link) int64 8B 0\n", + " source (link) \n", + "
" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 27 + }, + { + "cell_type": "code", + "id": "60", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:18.072870Z", + "start_time": "2025-12-13T14:13:18.057665Z" + } + }, + "source": [ + "# CO2 allocation in complex system\n", + "complex_sys.statistics.plot.sankey.effects(select={'effect': 'CO2'})" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 488B\n", + "Dimensions: (link: 2)\n", + "Coordinates:\n", + " * link (link) int64 16B 0 1\n", + " source (link) \n", + "
" ] }, - "jetTransient": { - "display_id": null + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 28 + }, + { + "cell_type": "markdown", + "id": "61", + "metadata": {}, + "source": [ + "### 6.5 Filtering with `select`\n", + "\n", + "Filter Sankey to specific buses or carriers:" + ] + }, + { + "cell_type": "code", + "id": "62", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:18.102271Z", + "start_time": "2025-12-13T14:13:18.087615Z" + } + }, + "source": [ + "# Only heat flows\n", + "complex_sys.statistics.plot.sankey.flows(select={'bus': 'Heat'})" + ], + "outputs": [ + { + "data": { + "text/plain": [ + "PlotResult(data= Size: 576B\n", + "Dimensions: (link: 3)\n", + "Coordinates:\n", + " * link (link) int64 24B 0 1 2\n", + " source (link) \n", + "
" + ] }, + "execution_count": 29, "metadata": {}, - "output_type": "display_data" - }, + "output_type": "execute_result" + } + ], + "execution_count": 29 + }, + { + "cell_type": "markdown", + "id": "63", + "metadata": {}, + "source": [ + "## 7. Topology Visualization\n", + "\n", + "Visualize the system structure (no solution data required)." + ] + }, + { + "cell_type": "markdown", + "id": "64", + "metadata": {}, + "source": [ + "### 7.1 Topology Plot\n", + "\n", + "Sankey-style network diagram:" + ] + }, + { + "cell_type": "code", + "id": "65", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T14:13:18.129663Z", + "start_time": "2025-12-13T14:13:18.109005Z" + } + }, + "source": [ + "simple.topology.plot()" + ], + "outputs": [ { "data": { + "text/plain": [ + "PlotResult(data= Size: 1kB\n", + "Dimensions: (link: 6)\n", + "Coordinates:\n", + " * link (link) ',\n", + " 'label': [Boiler(Gas), Boiler(Heat), GasGrid(Gas),\n", + " Office(Heat), ThermalStorage(Charge),\n", + " ThermalStorage(Discharge)],\n", + " 'source': [5, 4, 0, 1, 1, 2],\n", + " 'target': [4, 1, 5, 3, 2, 1],\n", + " 'value': [1, 1, 1, 1, 1, 1]},\n", + " 'node': {'color': [#636EFA, #D62728, #00CC96, #AB63FA, #EF553B,\n", + " #1F77B4],\n", + " 'customdata': [Source('GasGrid')
outputs:
*\n", + " Flow('GasGrid(Gas)', bus='Gas', size=500.0,\n", + " effects_per_flow_hour={'costs': ~0.1}),\n", + " Bus('Heat', carrier='heat')
inputs:
\n", + " * Flow('Boiler(Heat)', bus='Heat',\n", + " size=150.0)
*\n", + " Flow('ThermalStorage(Discharge)', bus='Heat',\n", + " size=100.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
*\n", + " Flow('ThermalStorage(Charge)', bus='Heat',\n", + " size=100.0,\n", + " status_parameters=StatusParameters())
\n", + " * Flow('Office(Heat)', bus='Heat', size=1.0,\n", + " fixed_relative_profile=20.0-92.3),\n", + " Storage('ThermalStorage',\n", + " capacity_in_flow_hours=500.0,\n", + " initial_charge_state=250.0,\n", + " minimal_final_charge_state=200.0,\n", + " eta_charge=1.0, eta_discharge=1.0,\n", + " relative_loss_per_hour=0.0)
inputs:
\n", + " * Flow('ThermalStorage(Charge)', bus='Heat',\n", + " size=100.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
*\n", + " Flow('ThermalStorage(Discharge)', bus='Heat',\n", + " size=100.0,\n", + " status_parameters=StatusParameters()),\n", + " Sink('Office')
inputs:
*\n", + " Flow('Office(Heat)', bus='Heat', size=1.0,\n", + " fixed_relative_profile=20.0-92.3),\n", + " Boiler('Boiler', thermal_efficiency=0.9,\n", + " fuel_flow=Flow('Boiler(Gas)', bus='Gas'),\n", + " thermal_flow=Flow('Boiler(Heat)', bus='Heat',\n", + " size=150.0))
inputs:
*\n", + " Flow('Boiler(Gas)', bus='Gas')
\n", + " outputs:
* Flow('Boiler(Heat)',\n", + " bus='Heat', size=150.0), Bus('Gas',\n", + " carrier='gas')
inputs:
*\n", + " Flow('GasGrid(Gas)', bus='Gas', size=500.0,\n", + " effects_per_flow_hour={'costs': ~0.1})
\n", + " outputs:
* Flow('Boiler(Gas)',\n", + " bus='Gas')],\n", + " 'hovertemplate': '%{customdata}',\n", + " 'label': [GasGrid, Heat, ThermalStorage, Office, Boiler,\n", + " Gas],\n", + " 'line': {'color': 'black', 'width': 0.5},\n", + " 'pad': 15,\n", + " 'thickness': 20},\n", + " 'type': 'sankey'}],\n", + " 'layout': {'template': '...', 'title': {'text': 'Flow System Topology'}}\n", + "}))" + ], "text/html": [ - "
" + "
\n", + "
" ] }, - "jetTransient": { - "display_id": null - }, + "execution_count": 30, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], - "source": [ - "simple.topology.plot()" - ] + "execution_count": 30 }, { "cell_type": "code", - "execution_count": 36, "id": "66", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:54.740689Z", - "start_time": "2025-12-12T12:06:54.669190Z" + "end_time": "2025-12-13T14:13:18.157403Z", + "start_time": "2025-12-13T14:13:18.136357Z" } }, + "source": [ + "complex_sys.topology.plot(title='Complex System Topology')" + ], "outputs": [ { "data": { + "text/plain": [ + "PlotResult(data= Size: 3kB\n", + "Dimensions: (link: 14)\n", + "Coordinates:\n", + " * link (link) ',\n", + " 'label': [BackupBoiler(Gas), BackupBoiler(Heat), CHP(El),\n", + " CHP(Gas), CHP(Heat), ElDemand(El),\n", + " ElectricityExport(El), ElectricityImport(El),\n", + " GasGrid(Gas), HeatDemand(Heat), HeatPump(El),\n", + " HeatPump(Heat), HeatStorage(Charge),\n", + " HeatStorage(Discharge)],\n", + " 'source': [11, 1, 9, 11, 9, 0, 0, 10, 2, 6, 0, 3, 6, 8],\n", + " 'target': [1, 6, 0, 9, 6, 4, 5, 0, 11, 7, 3, 6, 8, 6],\n", + " 'value': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]},\n", + " 'node': {'color': [#FECB52, #19D3F3, #636EFA, #FFA15A, #FF97FF,\n", + " #00CC96, #D62728, #B6E880, #FF6692, #AB63FA,\n", + " #EF553B, #1F77B4],\n", + " 'customdata': [Bus('Electricity',\n", + " carrier='electricity')
inputs:
*\n", + " Flow('ElectricityImport(El)',\n", + " bus='Electricity', size=100.0,\n", + " effects_per_flow_hour={'costs': 0.1-0.2,\n", + " 'CO2': 0.3-0.4})
* Flow('CHP(El)',\n", + " bus='Electricity', size=80.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
*\n", + " Flow('ElectricityExport(El)',\n", + " bus='Electricity', size=50.0,\n", + " effects_per_flow_hour={'costs': -0.2--\n", + " 0.1})
* Flow('HeatPump(El)',\n", + " bus='Electricity')
*\n", + " Flow('ElDemand(El)', bus='Electricity',\n", + " size=1.0, fixed_relative_profile=10.0-42.3),\n", + " Boiler('BackupBoiler',\n", + " thermal_efficiency=0.9,\n", + " fuel_flow=Flow('BackupBoiler(Gas)',\n", + " bus='Gas'),\n", + " thermal_flow=Flow('BackupBoiler(Heat)',\n", + " bus='Heat', size=80.0))
inputs:
*\n", + " Flow('BackupBoiler(Gas)', bus='Gas')
\n", + " outputs:
* Flow('BackupBoiler(Heat)',\n", + " bus='Heat', size=80.0), Source('GasGrid')
\n", + " outputs:
* Flow('GasGrid(Gas)',\n", + " bus='Gas', size=300.0,\n", + " effects_per_flow_hour={'costs': 0.1, 'CO2':\n", + " 0.2}), HeatPump('HeatPump', cop=3.5,\n", + " electrical_flow=Flow('HeatPump(El)',\n", + " bus='Electricity'),\n", + " thermal_flow=Flow('HeatPump(Heat)',\n", + " bus='Heat', size=InvestP...)
inputs:
\n", + " * Flow('HeatPump(El)', bus='Electricity')
\n", + " outputs:
* Flow('HeatPump(Heat)',\n", + " bus='Heat',\n", + " size=InvestParameters(minimum_size=0.0,\n", + " maximum_size...), Sink('ElDemand')
\n", + " inputs:
* Flow('ElDemand(El)',\n", + " bus='Electricity', size=1.0,\n", + " fixed_relative_profile=10.0-42.3),\n", + " Sink('ElectricityExport')
inputs:
\n", + " * Flow('ElectricityExport(El)',\n", + " bus='Electricity', size=50.0,\n", + " effects_per_flow_hour={'costs': -0.2--0.1}),\n", + " Bus('Heat', carrier='heat')
inputs:
\n", + " * Flow('CHP(Heat)', bus='Heat', size=85.0,\n", + " status_parameters=StatusParameters())
\n", + " * Flow('HeatPump(Heat)', bus='Heat',\n", + " size=InvestParameters(minimum_size=0.0,\n", + " maximum_size...)
*\n", + " Flow('BackupBoiler(Heat)', bus='Heat',\n", + " size=80.0)
*\n", + " Flow('HeatStorage(Discharge)', bus='Heat',\n", + " size=50.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
* Flow('HeatStorage(Charge)',\n", + " bus='Heat', size=50.0,\n", + " status_parameters=StatusParameters())
\n", + " * Flow('HeatDemand(Heat)', bus='Heat',\n", + " size=1.0, fixed_relative_profile=20.0-87.5),\n", + " Sink('HeatDemand')
inputs:
*\n", + " Flow('HeatDemand(Heat)', bus='Heat',\n", + " size=1.0, fixed_relative_profile=20.0-87.5),\n", + " Storage('HeatStorage', capacity_in_flow_hours\n", + " =InvestParameters(minimum_size=0.0,\n", + " maximum_size..., eta_charge=1.0,\n", + " eta_discharge=1.0)
inputs:
*\n", + " Flow('HeatStorage(Charge)', bus='Heat',\n", + " size=50.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
*\n", + " Flow('HeatStorage(Discharge)', bus='Heat',\n", + " size=50.0,\n", + " status_parameters=StatusParameters()),\n", + " LinearConverter('CHP', status_parameters=Stat\n", + " usParameters(effects_per_active_hour={'cost..\n", + " ., piecewise_conversion=PiecewiseConversion(p\n", + " iecewises={'Gas': Piecewis...)
\n", + " inputs:
* Flow('CHP(Gas)', bus='Gas',\n", + " size=200.0,\n", + " status_parameters=StatusParameters())
\n", + " outputs:
* Flow('CHP(El)',\n", + " bus='Electricity', size=80.0,\n", + " status_parameters=StatusParameters())
\n", + " * Flow('CHP(Heat)', bus='Heat', size=85.0,\n", + " status_parameters=StatusParameters()),\n", + " Source('ElectricityImport')
outputs:
\n", + " * Flow('ElectricityImport(El)',\n", + " bus='Electricity', size=100.0,\n", + " effects_per_flow_hour={'costs': 0.1-0.2,\n", + " 'CO2': 0.3-0.4}), Bus('Gas',\n", + " carrier='gas')
inputs:
*\n", + " Flow('GasGrid(Gas)', bus='Gas', size=300.0,\n", + " effects_per_flow_hour={'costs': 0.1, 'CO2':\n", + " 0.2})
outputs:
* Flow('CHP(Gas)',\n", + " bus='Gas', size=200.0,\n", + " status_parameters=StatusParameters())
\n", + " * Flow('BackupBoiler(Gas)', bus='Gas')],\n", + " 'hovertemplate': '%{customdata}',\n", + " 'label': [Electricity, BackupBoiler, GasGrid, HeatPump,\n", + " ElDemand, ElectricityExport, Heat, HeatDemand,\n", + " HeatStorage, CHP, ElectricityImport, Gas],\n", + " 'line': {'color': 'black', 'width': 0.5},\n", + " 'pad': 15,\n", + " 'thickness': 20},\n", + " 'type': 'sankey'}],\n", + " 'layout': {'template': '...', 'title': {'text': 'Complex System Topology'}}\n", + "}))" + ], "text/html": [ - "
" + "
\n", + "
" ] }, - "jetTransient": { - "display_id": null - }, + "execution_count": 31, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], - "source": [ - "complex_sys.topology.plot(title='Complex System Topology')" - ] + "execution_count": 31 }, { "cell_type": "markdown", @@ -5987,14 +4882,24 @@ }, { "cell_type": "code", - "execution_count": 37, "id": "68", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:54.957830Z", - "start_time": "2025-12-12T12:06:54.902442Z" + "end_time": "2025-12-13T14:13:18.168871Z", + "start_time": "2025-12-13T14:13:18.165083Z" } }, + "source": [ + "nodes, edges = simple.topology.infos()\n", + "\n", + "print('Nodes:')\n", + "for label, info in nodes.items():\n", + " print(f' {label}: {info[\"class\"]}')\n", + "\n", + "print('\\nEdges (flows):')\n", + "for label, info in edges.items():\n", + " print(f' {info[\"start\"]} -> {info[\"end\"]}: {label}')" + ], "outputs": [ { "name": "stdout", @@ -6018,17 +4923,7 @@ ] } ], - "source": [ - "nodes, edges = simple.topology.infos()\n", - "\n", - "print('Nodes:')\n", - "for label, info in nodes.items():\n", - " print(f' {label}: {info[\"class\"]}')\n", - "\n", - "print('\\nEdges (flows):')\n", - "for label, info in edges.items():\n", - " print(f' {info[\"start\"]} -> {info[\"end\"]}: {label}')" - ] + "execution_count": 32 }, { "cell_type": "markdown", @@ -6042,14 +4937,19 @@ }, { "cell_type": "code", - "execution_count": 38, "id": "70", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:55.088528Z", - "start_time": "2025-12-12T12:06:55.064186Z" + "end_time": "2025-12-13T14:13:18.194588Z", + "start_time": "2025-12-13T14:13:18.191374Z" } }, + "source": [ + "print('Multiperiod system dimensions:')\n", + "print(f' Periods: {list(multiperiod.periods)}')\n", + "print(f' Scenarios: {list(multiperiod.scenarios)}')\n", + "print(f' Solution dims: {dict(multiperiod.solution.sizes)}')" + ], "outputs": [ { "name": "stdout", @@ -6062,30 +4962,24 @@ ] } ], - 
"source": [ - "print('Multiperiod system dimensions:')\n", - "print(f' Periods: {list(multiperiod.periods)}')\n", - "print(f' Scenarios: {list(multiperiod.scenarios)}')\n", - "print(f' Solution dims: {dict(multiperiod.solution.sizes)}')" - ] + "execution_count": 33 }, { "cell_type": "code", - "execution_count": 39, "id": "71", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:57.157602Z", - "start_time": "2025-12-12T12:06:55.650661Z" + "end_time": "2025-12-13T14:13:18.325331Z", + "start_time": "2025-12-13T14:13:18.199791Z" } }, + "source": [ + "# Balance plot with faceting by scenario\n", + "multiperiod.statistics.plot.balance('Heat')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ "PlotResult(data= Size: 10kB\n", "Dimensions: (time: 49, period: 3, scenario: 2)\n", @@ -6094,9 +4988,9 @@ " * period (period) int64 24B 2024 2025 2026\n", " * scenario (scenario) scena' ... '}
value=%{y}'),\n", " 'legendgroup': 'Boiler(Heat)',\n", @@ -6132,7 +5026,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x4',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwFS20eeOpEfAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y4'},\n", " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", @@ -6169,7 +5063,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x5',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwGC20eeOpEfAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y5'},\n", " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", @@ -6206,7 +5100,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x6',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwGC20eeOpEfAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwFvXQkqFnk' ... 'rxMNlDwFy20eeOpEfAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y6'},\n", " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", @@ -6243,7 +5137,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", @@ -6280,7 +5174,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x2',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y2'},\n", " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n", @@ -6317,7 +5211,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x3',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'Vm3JI8wDyyyUAFXDnAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y3'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6354,7 +5248,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x4',\n", - " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... 'JRKxBKPe7+DqGhoTi9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'jgg89hPWP9SoFav2g9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y4'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6391,7 +5285,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x5',\n", - " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... '1P1R87PWP9SoFav0e9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'qBWr9oPWP9SoFav2g9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y5'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6428,7 +5322,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x6',\n", - " 'y': {'bdata': ('5ZuWpeU9RsDPqeLGgqdEwFTXQkqFnk' ... 'AAAAAAgGP9SoFav0e9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9oPby8nSExr2' ... 'qBWr9oPQo++OCDz2E9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y6'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6465,7 +5359,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('EgPMGubHPsAnjH0z/HU4wAkgRYDluD' ... 'j1K22OPWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvx2Q9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6502,7 +5396,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x2',\n", - " 'y': {'bdata': ('EgPMGubHPsCfi30z/HU4wDogRYDluD' ... '2guF0/PWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvx2Q9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y2'},\n", " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n", @@ -6539,7 +5433,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x3',\n", - " 'y': {'bdata': ('EgPMGubHPsCfi30z/HU4wDogRYDluD' ... 'DGEbwovWP9SoFavzg9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvp2U9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y3'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6576,7 +5470,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x4',\n", - " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... 'SjViA+vW73dwgNDQU9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'jgg49ivWP9SoFav2m9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y4'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6613,7 +5507,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x5',\n", - " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... '6n6o9DvWP9SoFav0g9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'qBWr9pvWP9SoFav2m9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y5'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6650,7 +5544,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x6',\n", - " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAABHvWT9SoFavz' ... 'AAAAAAAGP9SoFav0g9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9pvby8nSEx72' ... 'qBWr9pvQo++OCDj2K9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y6'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6687,7 +5581,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49CPQAAAAAAAD' ... 'j1K7WPvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvp2W9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6724,7 +5618,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x2',\n", - " 'y': {'bdata': ('AAAAAAAAAABj/UqBWr9YvQAAAAAAAD' ... '2guF0xvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvp2W9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y2'},\n", " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n", @@ -6761,7 +5655,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x3',\n", - " 'y': {'bdata': ('AAAAAAAAAABj/UqBWr9YvQAAAAAAAD' ... '9y3IcWvWP9SoFavzm9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvh2a9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y3'},\n", " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n", @@ -7058,44 +5952,44 @@ " 'yaxis5': {'anchor': 'x5', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False},\n", " 'yaxis6': {'anchor': 'x6', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 39, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Balance plot with faceting by scenario\n", - "multiperiod.statistics.plot.balance('Heat')" - ] + "execution_count": 34 }, { "cell_type": "code", - "execution_count": 40, "id": "72", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:57.734537Z", - "start_time": "2025-12-12T12:06:57.451036Z" + "end_time": "2025-12-13T14:13:18.395048Z", + "start_time": "2025-12-13T14:13:18.341709Z" } }, + "source": [ + "# Filter to specific scenario/period\n", + "multiperiod.statistics.plot.balance('Heat', select={'scenario': 'high_demand', 'period': 2024})" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ "PlotResult(data= Size: 2kB\n", "Dimensions: (time: 49)\n", "Coordinates:\n", " * time (time) datetime64[ns] 392B 2024-01-01 ... 2024...\n", "Data variables:\n", - " Boiler(Heat) (time) float64 392B -0.0 -0.0 -0.0 ... -47.29 nan\n", - " ThermalStorage(Discharge) (time) float64 392B -44.48 -41.31 ... nan\n", - " ThermalStorage(Charge) (time) float64 392B 0.0 4.263e-14 ... nan\n", + " Boiler(Heat) (time) float64 392B -44.48 -41.31 ... -47.29 nan\n", + " ThermalStorage(Discharge) (time) float64 392B 1.723e-13 6.749e-13 ... nan\n", + " ThermalStorage(Charge) (time) float64 392B -1.794e-13 -7.034e-13 ... nan\n", " Building(Heat) (time) float64 392B 44.48 41.31 ... 47.29 nan, figure=Figure({\n", " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n", " 'legendgroup': 'Boiler(Heat)',\n", @@ -7131,7 +6025,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'rxMNlDwFS20eeOpEfAAAAAAAAA+P8='),\n", + " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n", @@ -7168,7 +6062,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('5ZuWpeU9RsDsqeLGgqdEwEfXQkqFnk' ... 'JRKxBKPe7+DqGhoTi9AAAAAAAA+P8='),\n", + " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'jgg89hPWP9SoFav2g9AAAAAAAA+P8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n", @@ -7205,7 +6099,7 @@ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n", " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n", " 'xaxis': 'x',\n", - " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAoPWP9SoFav0' ... 'SjViA+vW73dwgNDQU9AAAAAAAA+H8='),\n", + " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'jgg49ivWP9SoFav2m9AAAAAAAA+H8='),\n", " 'dtype': 'f8'},\n", " 'yaxis': 'y'},\n", " {'hovertemplate': 'variable=Building(Heat)
time=%{x}
value=%{y}',\n", @@ -7254,77 +6148,74 @@ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 40, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Filter to specific scenario/period\n", - "multiperiod.statistics.plot.balance('Heat', select={'scenario': 'high_demand', 'period': 2024})" - ] + "execution_count": 35 }, { "cell_type": "code", - "execution_count": 41, "id": "73", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:58.022014Z", - "start_time": "2025-12-12T12:06:57.778237Z" + "end_time": "2025-12-13T14:13:18.481894Z", + "start_time": "2025-12-13T14:13:18.459661Z" } }, + "source": [ + "# Sankey aggregates across all dimensions by default\n", + "multiperiod.statistics.plot.sankey.flows()" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ - "PlotResult(data= Size: 1kB\n", - "Dimensions: (link: 5)\n", + "PlotResult(data= Size: 592B\n", + "Dimensions: (link: 4)\n", "Coordinates:\n", - " * link (link) int64 40B 0 1 2 3 4\n", - " source (link) \n", + "
" ] }, - "execution_count": 41, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Sankey aggregates across all dimensions by default\n", - "multiperiod.statistics.plot.sankey.flows()" - ] + "execution_count": 36 }, { "cell_type": "markdown", @@ -7338,21 +6229,20 @@ }, { "cell_type": "code", - "execution_count": 42, "id": "75", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:58.557731Z", - "start_time": "2025-12-12T12:06:58.171959Z" + "end_time": "2025-12-13T14:13:18.553613Z", + "start_time": "2025-12-13T14:13:18.488703Z" } }, + "source": [ + "# Using a colorscale name\n", + "simple.statistics.plot.balance('Heat', colors='Set2')" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ "PlotResult(data= Size: 7kB\n", "Dimensions: (time: 169)\n", @@ -7760,35 +6650,35 @@ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 42, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Using a colorscale name\n", - "simple.statistics.plot.balance('Heat', colors='Set2')" - ] + "execution_count": 37 }, { "cell_type": "code", - "execution_count": 43, "id": "76", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:59.181165Z", - "start_time": "2025-12-12T12:06:58.735466Z" + "end_time": "2025-12-13T14:13:18.619651Z", + "start_time": "2025-12-13T14:13:18.562286Z" } }, + "source": [ + "# Using a list of colors\n", + "simple.statistics.plot.balance('Heat', colors=['#e41a1c', '#377eb8', '#4daf4a', '#984ea3'])" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ "PlotResult(data= Size: 7kB\n", "Dimensions: (time: 169)\n", @@ -8196,35 +7086,43 @@ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 43, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Using a list of colors\n", - "simple.statistics.plot.balance('Heat', colors=['#e41a1c', '#377eb8', '#4daf4a', '#984ea3'])" - ] + "execution_count": 38 }, { "cell_type": "code", - "execution_count": 44, "id": "77", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:06:59.730556Z", - "start_time": "2025-12-12T12:06:59.234563Z" + "end_time": "2025-12-13T14:13:18.672843Z", + "start_time": "2025-12-13T14:13:18.628572Z" } }, + "source": [ + "# Using a dictionary for specific labels\n", + "simple.statistics.plot.balance(\n", + " 'Heat',\n", + " colors={\n", + " 'Boiler(Heat)': 'orangered',\n", + " 'ThermalStorage(Charge)': 'steelblue',\n", + " 'ThermalStorage(Discharge)': 'lightblue',\n", + " 'Office(Heat)': 'forestgreen',\n", + " },\n", + ")" + ], "outputs": [ { "data": { - "text/html": [ - "
\n", - "
" - ], "text/plain": [ "PlotResult(data= Size: 7kB\n", "Dimensions: (time: 169)\n", @@ -8632,25 +7530,18 @@ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n", " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n", "}))" + ], + "text/html": [ + "
\n", + "
" ] }, - "execution_count": 44, + "execution_count": 39, "metadata": {}, "output_type": "execute_result" } ], - "source": [ - "# Using a dictionary for specific labels\n", - "simple.statistics.plot.balance(\n", - " 'Heat',\n", - " colors={\n", - " 'Boiler(Heat)': 'orangered',\n", - " 'ThermalStorage(Charge)': 'steelblue',\n", - " 'ThermalStorage(Discharge)': 'lightblue',\n", - " 'Office(Heat)': 'forestgreen',\n", - " },\n", - ")" - ] + "execution_count": 39 }, { "cell_type": "markdown", @@ -8664,14 +7555,21 @@ }, { "cell_type": "code", - "execution_count": 45, "id": "79", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:07:00.118627Z", - "start_time": "2025-12-12T12:06:59.813869Z" + "end_time": "2025-12-13T14:13:18.710193Z", + "start_time": "2025-12-13T14:13:18.681521Z" } }, + "source": [ + "# Get plot result\n", + "result = simple.statistics.plot.balance('Heat')\n", + "\n", + "print('PlotResult contains:')\n", + "print(f' data: {type(result.data).__name__} with vars {list(result.data.data_vars)}')\n", + "print(f' figure: {type(result.figure).__name__}')" + ], "outputs": [ { "name": "stdout", @@ -8683,28 +7581,42 @@ ] } ], - "source": [ - "# Get plot result\n", - "result = simple.statistics.plot.balance('Heat')\n", - "\n", - "print('PlotResult contains:')\n", - "print(f' data: {type(result.data).__name__} with vars {list(result.data.data_vars)}')\n", - "print(f' figure: {type(result.figure).__name__}')" - ] + "execution_count": 40 }, { "cell_type": "code", - "execution_count": 46, "id": "80", "metadata": { "ExecuteTime": { - "end_time": "2025-12-12T12:07:00.477422Z", - "start_time": "2025-12-12T12:07:00.433079Z" + "end_time": "2025-12-13T14:13:18.736577Z", + "start_time": "2025-12-13T14:13:18.723621Z" } }, + "source": [ + "# Export data to pandas DataFrame\n", + "df = result.data.to_dataframe()\n", + "df.head()" + ], "outputs": [ { "data": { + "text/plain": [ + " Boiler(Heat) ThermalStorage(Discharge) \\\n", + "time \n", + "2024-01-15 00:00:00 -32.483571 
-0.000000e+00 \n", + "2024-01-15 01:00:00 -29.308678 5.275242e-13 \n", + "2024-01-15 02:00:00 -33.238443 -7.086767e-13 \n", + "2024-01-15 03:00:00 -101.411593 -3.516828e-13 \n", + "2024-01-15 04:00:00 -128.829233 -5.613288e-13 \n", + "\n", + " ThermalStorage(Charge) Office(Heat) \n", + "time \n", + "2024-01-15 00:00:00 0.000000e+00 32.483571 \n", + "2024-01-15 01:00:00 -3.747575e-13 29.308678 \n", + "2024-01-15 02:00:00 8.792069e-13 33.238443 \n", + "2024-01-15 03:00:00 6.379644e+01 37.615149 \n", + "2024-01-15 04:00:00 1.000000e+02 28.829233 " + ], "text/html": [ "
\n", "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
 Time [s]Cost [€]Cost Gap [%]
Method   
Full optimization70.871,540,2000.00
Rolling horizon30.921,540,6760.03
\n" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 6 + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "## Visualize: Heat Balance Comparison" + ] + }, + { + "cell_type": "code", + "id": "13", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:28.570509Z", + "start_time": "2025-12-13T19:03:27.661Z" + } + }, + "source": [ + "fs_full.statistics.plot.balance('Heat').figure.update_layout(title='Heat Balance (Full)')" + ], + "outputs": [ + { + "data": { + "text/html": [ + " \n", + " \n", + " " + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + }, + { + "data": { + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + } + ], + "execution_count": 7 + }, + { + "cell_type": "code", + "id": "14", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:29.994610Z", + "start_time": "2025-12-13T19:03:29.772272Z" + } + }, + "source": [ + "fs_rolling.statistics.plot.balance('Heat').figure.update_layout(title='Heat Balance (Rolling)')" + ], + "outputs": [ + { + "data": { + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + } + ], + "execution_count": 8 + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "## Storage State Continuity\n", + "\n", + "Rolling horizon transfers storage charge states between segments to ensure continuity:" + ] + }, + { + "cell_type": "code", + "id": "16", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:30.194568Z", + "start_time": "2025-12-13T19:03:30.141045Z" + } + }, + "source": [ + "fig = make_subplots(\n", + " rows=2, cols=1, shared_xaxes=True, vertical_spacing=0.1, subplot_titles=['Full Optimization', 'Rolling Horizon']\n", + ")\n", + "\n", + "# Full optimization\n", + "charge_full = fs_full.solution['Storage|charge_state'].values[:-1] # Drop final value\n", + "fig.add_trace(go.Scatter(x=timesteps, y=charge_full, name='Full', line=dict(color='blue')), row=1, col=1)\n", + "\n", + "# Rolling horizon\n", + "charge_rolling = fs_rolling.solution['Storage|charge_state'].values[:-1]\n", + "fig.add_trace(go.Scatter(x=timesteps, y=charge_rolling, name='Rolling', line=dict(color='orange')), row=2, col=1)\n", + "\n", + "fig.update_yaxes(title_text='Charge State [MWh]', row=1, col=1)\n", + "fig.update_yaxes(title_text='Charge State [MWh]', row=2, col=1)\n", + "fig.update_layout(height=400, showlegend=False)\n", + "fig.show()" + ], + "outputs": [ + { + "data": { + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + } + ], + "execution_count": 9 + }, + { + "cell_type": "markdown", + "id": "17", + "metadata": {}, + "source": [ + "## Inspect Individual Segments\n", + "\n", + "The method returns the individual segment FlowSystems, which can be inspected:" + ] + }, + { + "cell_type": "code", + "id": "18", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:30.246423Z", + "start_time": "2025-12-13T19:03:30.228470Z" + } + }, + "source": [ + "print(f'Number of segments: {len(segments)}')\n", + "print()\n", + "for i, seg in enumerate(segments):\n", + " start_time = seg.timesteps[0]\n", + " end_time = seg.timesteps[-1]\n", + " cost = seg.solution['costs'].item()\n", + " print(\n", + " f'Segment {i + 1}: {start_time.strftime(\"%Y-%m-%d %H:%M\")} → {end_time.strftime(\"%Y-%m-%d %H:%M\")} | Cost: {cost:,.0f} €'\n", + " )" + ], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of segments: 7\n", + "\n", + "Segment 1: 2020-01-01 00:00 → 2020-01-03 23:45 | Cost: 318,658 €\n", + "Segment 2: 2020-01-03 00:00 → 2020-01-05 23:45 | Cost: 275,399 €\n", + "Segment 3: 2020-01-05 00:00 → 2020-01-07 23:45 | Cost: 335,051 €\n", + "Segment 4: 2020-01-07 00:00 → 2020-01-09 23:45 | Cost: 406,345 €\n", + "Segment 5: 2020-01-09 00:00 → 2020-01-11 23:45 | Cost: 356,730 €\n", + "Segment 6: 2020-01-11 00:00 → 2020-01-13 23:45 | Cost: 275,606 €\n", + "Segment 7: 2020-01-13 00:00 → 2020-01-14 23:45 | Cost: 270,066 €\n" + ] + } + ], + "execution_count": 10 + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": "## Visualize Segment Overlaps\n\nUnderstanding how segments overlap is key to tuning rolling horizon. 
Let's visualize the flow rates from each segment including their overlap regions:" + }, + { + "cell_type": "code", + "id": "20", + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:30.528986Z", + "start_time": "2025-12-13T19:03:30.272546Z" + } + }, + "source": [ + "# Concatenate all segment solutions into one dataset (including overlaps)\n", + "ds = xr.concat([seg.solution for seg in segments], dim=pd.RangeIndex(len(segments), name='segment'), join='outer')\n", + "\n", + "# Plot CHP thermal flow across all segments - each segment as a separate line\n", + "px.line(\n", + " ds['Boiler(Q_th)|flow_rate'].to_pandas().T,\n", + " labels={'value': 'Boiler Thermal Output [MW]', 'index': 'Timestep'},\n", + ")" + ], + "outputs": [ + { + "data": { + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + } + ], + "execution_count": 11 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-12-13T19:03:30.963250Z", + "start_time": "2025-12-13T19:03:30.651056Z" + } + }, + "cell_type": "code", + "source": [ + "px.line(\n", + " ds['Storage|charge_state'].to_pandas().T,\n", + " labels={'value': 'Storage Charge State [MW]', 'index': 'Timestep'},\n", + ")" + ], + "id": "d7c660381f2190e0", + "outputs": [ + { + "data": { + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data", + "jetTransient": { + "display_id": null + } + } + ], + "execution_count": 12 + }, + { + "cell_type": "markdown", + "id": "21", + "metadata": {}, + "source": [ + "## When to Use Rolling Horizon\n", + "\n", + "| Use Case | Recommendation |\n", + "|----------|----------------|\n", + "| **Memory limits** | Large problems that exceed available memory |\n", + "| **Operational planning** | When limited foresight is realistic |\n", + "| **Quick approximate solutions** | Faster than full optimization |\n", + "| **Investment decisions** | Use full optimization instead |\n", + "\n", + "### Limitations\n", + "\n", + "- **No investments**: `InvestParameters` are not supported (raises error)\n", + "- **Suboptimal storage**: Limited foresight may miss long-term storage opportunities\n", + "- **Global constraints**: `flow_hours_max` etc. cannot be enforced globally" + ] + }, + { + "cell_type": "markdown", + "id": "22", + "metadata": {}, + "source": [ + "## API Reference\n", + "\n", + "```python\n", + "segments = flow_system.optimize.rolling_horizon(\n", + " solver, # Solver instance\n", + " horizon=192, # Timesteps per segment (e.g., 2 days at 15-min resolution)\n", + " overlap=48, # Additional lookahead timesteps (e.g., 12 hours)\n", + " nr_of_previous_values=1, # Flow history for uptime/downtime tracking\n", + ")\n", + "\n", + "# Combined solution on original FlowSystem\n", + "flow_system.solution['costs'].item()\n", + "\n", + "# Individual segment solutions\n", + "for seg in segments:\n", + " print(seg.solution['costs'].item())\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "23", + "metadata": {}, + "source": "## Summary\n\nYou learned how to:\n\n- Use **`optimize.rolling_horizon()`** to decompose large problems\n- Choose **horizon** and **overlap** parameters\n- Understand the **trade-offs** vs. full optimization\n\n### Key Takeaways\n\n1. 
**Rolling horizon** is useful for memory-limited or operational planning problems\n2. **Overlap** improves solution quality at the cost of computation time\n3. **Storage states** are automatically transferred between segments\n4. Use **full optimization** for investment decisions\n\n### Related Notebooks\n\n- **[08a-Aggregation](08a-aggregation.ipynb)**: For investment problems, use time series aggregation (resampling, clustering) instead" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/10-transmission.ipynb b/docs/notebooks/10-transmission.ipynb index 898d092c0..8c74e4e8c 100644 --- a/docs/notebooks/10-transmission.ipynb +++ b/docs/notebooks/10-transmission.ipynb @@ -384,22 +384,7 @@ "cell_type": "markdown", "id": "29", "metadata": {}, - "source": [ - "## Summary\n", - "\n", - "You learned how to:\n", - "\n", - "- Create **unidirectional transmission** between two buses\n", - "- Model **bidirectional transmission** with flow direction constraints\n", - "- Apply **relative and absolute losses** to transmission\n", - "- Optimize **transmission capacity** using InvestParameters\n", - "- Analyze **multi-site energy systems** with interconnections\n", - "\n", - "### Next Steps\n", - "\n", - "- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Multi-year planning with uncertainty\n", - "- **[08-large-scale-optimization](08-large-scale-optimization.ipynb)**: Computational efficiency techniques" - ] + "source": "## Summary\n\nYou learned how to:\n\n- Create **unidirectional transmission** between two buses\n- Model **bidirectional transmission** with flow direction constraints\n- Apply **relative and absolute losses** to transmission\n- Optimize **transmission capacity** using InvestParameters\n- Analyze **multi-site energy systems** with 
interconnections\n\n### Next Steps\n\n- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Multi-year planning with uncertainty\n- **[08a-Aggregation](08a-aggregation.ipynb)**: Speed up large problems with time series aggregation\n- **[08b-Rolling Horizon](08b-rolling-horizon.ipynb)**: Decompose large problems into sequential segments" } ], "metadata": { diff --git a/docs/notebooks/index.md b/docs/notebooks/index.md index 067d5247b..233f6be1b 100644 --- a/docs/notebooks/index.md +++ b/docs/notebooks/index.md @@ -36,7 +36,8 @@ Learn flixopt through practical examples organized by topic. Each notebook inclu | Notebook | Description | |----------|-------------| | [07-Scenarios](07-scenarios-and-periods.ipynb) | Multi-year planning with uncertain demand scenarios | -| [08-Large-Scale](08-large-scale-optimization.ipynb) | Speed up large problems with resampling and two-stage optimization | +| [08a-Aggregation](08a-aggregation.ipynb) | Speed up large problems with resampling and two-stage optimization | +| [08b-Rolling Horizon](08b-rolling-horizon.ipynb) | Decompose large problems into sequential time segments | ## Results @@ -58,5 +59,6 @@ Learn flixopt through practical examples organized by topic. 
Each notebook inclu | `PiecewiseConversion`, part-load efficiency | Piecewise Conversion | | `PiecewiseEffects`, economies of scale | Piecewise Effects | | Periods, scenarios, weights | Scenarios | -| `transform.resample()`, `fix_sizes()` | Large-Scale | +| `transform.resample()`, `fix_sizes()` | Aggregation | +| `optimize.rolling_horizon()` | Rolling Horizon | | `statistics`, `topology`, plotting | Plotting | diff --git a/docs/user-guide/migration-guide-v5.md b/docs/user-guide/migration-guide-v5.md index 5c19761e0..0c43e18f0 100644 --- a/docs/user-guide/migration-guide-v5.md +++ b/docs/user-guide/migration-guide-v5.md @@ -325,17 +325,27 @@ Clustered optimization uses the new transform accessor: # Results in clustered_fs.solution ``` -### Segmented Optimization (Not Yet Migrated) +### Segmented / Rolling Horizon Optimization -Segmented optimization still uses the class-based API. A new `optimize.rolling()` method is planned for a future release. +=== "v4.x (Old)" + ```python + calc = fx.SegmentedOptimization('model', flow_system, + timesteps_per_segment=96) + calc.do_modeling_and_solve(solver) + results = calc.results # Returns SegmentedResults + ``` -```python -# Still use the class-based API (unchanged from v4.x) -calc = fx.SegmentedOptimization('model', flow_system, - timesteps_per_segment=96) -calc.do_modeling_and_solve(solver) -results = calc.results # Returns SegmentedResults -``` +=== "v5.0.0 (New)" + ```python + # Use optimize.rolling_horizon() method + segments = flow_system.optimize.rolling_horizon( + solver, + horizon=96, # Timesteps per segment + overlap=12, # Lookahead for storage optimization + ) + # Combined solution on original FlowSystem + flow_system.solution['costs'].item() + ``` --- diff --git a/docs/user-guide/support.md b/docs/user-guide/support.md index 517a353a1..eba27c616 100644 --- a/docs/user-guide/support.md +++ b/docs/user-guide/support.md @@ -16,7 +16,7 @@ When opening an issue, include: - [FAQ](faq.md) — Common questions - 
[Troubleshooting](troubleshooting.md) — Common issues - [Examples](../notebooks/index.md) — Working code -- [API Reference](../api-reference/index.md) — Technical docs +- [API Reference](../api-reference/) — Technical docs ## Contributing diff --git a/flixopt/optimize_accessor.py b/flixopt/optimize_accessor.py index 5428cd855..f88cdf982 100644 --- a/flixopt/optimize_accessor.py +++ b/flixopt/optimize_accessor.py @@ -7,12 +7,22 @@ from __future__ import annotations +import logging +import sys from typing import TYPE_CHECKING +import xarray as xr +from tqdm import tqdm + +from .config import CONFIG +from .io import suppress_output + if TYPE_CHECKING: from .flow_system import FlowSystem from .solvers import _Solver +logger = logging.getLogger('flixopt') + class OptimizeAccessor: """ @@ -28,10 +38,10 @@ class OptimizeAccessor: >>> flow_system.optimize(solver) >>> print(flow_system.solution) - Future specialized modes: + Rolling horizon optimization: - >>> flow_system.optimize.clustered(solver, aggregation=params) - >>> flow_system.optimize.mga(solver, alternatives=5) + >>> segments = flow_system.optimize.rolling_horizon(solver, horizon=168) + >>> print(flow_system.solution) # Combined result """ def __init__(self, flow_system: FlowSystem) -> None: @@ -79,13 +89,273 @@ def __call__(self, solver: _Solver, normalize_weights: bool = True) -> FlowSyste self._fs.solve(solver) return self._fs - # Future methods can be added here: - # - # def clustered(self, solver: _Solver, aggregation: AggregationParameters, - # normalize_weights: bool = True) -> FlowSystem: - # """Clustered optimization with time aggregation.""" - # ... - # - # def mga(self, solver: _Solver, alternatives: int = 5) -> FlowSystem: - # """Modeling to Generate Alternatives.""" - # ... + def rolling_horizon( + self, + solver: _Solver, + horizon: int = 100, + overlap: int = 0, + nr_of_previous_values: int = 1, + ) -> list[FlowSystem]: + """ + Solve the optimization using a rolling horizon approach. 
+ + Divides the time horizon into overlapping segments that are solved sequentially. + Each segment uses final values from the previous segment as initial conditions, + ensuring dynamic continuity across the solution. The combined solution is stored + on the original FlowSystem. + + This approach is useful for: + - Large-scale problems that exceed memory limits + - Annual planning with seasonal variations + - Operational planning with limited foresight + + Args: + solver: The solver to use (e.g., HighsSolver, GurobiSolver). + horizon: Number of timesteps in each segment (excluding overlap). + Must be > 2. Larger values provide better optimization at the cost + of memory and computation time. Default: 100. + overlap: Number of additional timesteps added to each segment for lookahead. + Improves storage optimization by providing foresight. Higher values + improve solution quality but increase computational cost. Default: 0. + nr_of_previous_values: Number of previous timestep values to transfer between + segments for initialization (e.g., for uptime/downtime tracking). Default: 1. + + Returns: + List of segment FlowSystems, each with their individual solution. + The combined solution (with overlaps trimmed) is stored on the original FlowSystem. + + Raises: + ValueError: If horizon <= 2 or overlap < 0. + ValueError: If horizon + overlap > total timesteps. + ValueError: If InvestParameters are used (not supported in rolling horizon). + + Examples: + Basic rolling horizon optimization: + + >>> segments = flow_system.optimize.rolling_horizon( + ... solver, + ... horizon=168, # Weekly segments + ... overlap=24, # 1-day lookahead + ... ) + >>> print(flow_system.solution) # Combined result + + Inspect individual segments: + + >>> for i, seg in enumerate(segments): + ... print(f'Segment {i}: {seg.solution["costs"].item():.2f}') + + Note: + - InvestParameters are not supported as investment decisions require + full-horizon optimization. 
+ - Global constraints (flow_hours_max, etc.) may produce suboptimal results + as they cannot be enforced globally across segments. + - Storage optimization may be suboptimal compared to full-horizon solutions + due to limited foresight in each segment. + """ + + # Validation + if horizon <= 2: + raise ValueError('horizon must be greater than 2 to avoid internal side effects.') + if overlap < 0: + raise ValueError('overlap must be non-negative.') + if nr_of_previous_values < 0: + raise ValueError('nr_of_previous_values must be non-negative.') + if nr_of_previous_values > horizon: + raise ValueError('nr_of_previous_values cannot exceed horizon.') + + total_timesteps = len(self._fs.timesteps) + horizon_with_overlap = horizon + overlap + + if horizon_with_overlap > total_timesteps: + raise ValueError( + f'horizon + overlap ({horizon_with_overlap}) cannot exceed total timesteps ({total_timesteps}).' + ) + + # Ensure flow system is connected + if not self._fs.connected_and_transformed: + self._fs.connect_and_transform() + + # Calculate segment indices + segment_indices = self._calculate_segment_indices(total_timesteps, horizon, overlap) + n_segments = len(segment_indices) + logger.info( + f'Starting Rolling Horizon Optimization - Segments: {n_segments}, Horizon: {horizon}, Overlap: {overlap}' + ) + + # Create and solve segments + segment_flow_systems: list[FlowSystem] = [] + + progress_bar = tqdm( + enumerate(segment_indices), + total=n_segments, + desc='Solving segments', + unit='segment', + file=sys.stdout, + disable=not CONFIG.Solving.log_to_console, + ) + + try: + for i, (start_idx, end_idx) in progress_bar: + progress_bar.set_description(f'Segment {i + 1}/{n_segments} (timesteps {start_idx}-{end_idx})') + + # Suppress output when progress bar is shown (including logger and solver) + if CONFIG.Solving.log_to_console: + # Temporarily raise logger level to suppress INFO messages + original_level = logger.level + logger.setLevel(logging.WARNING) + try: + with 
suppress_output(): + segment_fs = self._fs.transform.isel(time=slice(start_idx, end_idx)) + if i > 0 and nr_of_previous_values > 0: + self._transfer_state( + source_fs=segment_flow_systems[i - 1], + target_fs=segment_fs, + horizon=horizon, + nr_of_previous_values=nr_of_previous_values, + ) + segment_fs.build_model() + if i == 0: + self._check_no_investments(segment_fs) + segment_fs.solve(solver) + finally: + logger.setLevel(original_level) + else: + segment_fs = self._fs.transform.isel(time=slice(start_idx, end_idx)) + if i > 0 and nr_of_previous_values > 0: + self._transfer_state( + source_fs=segment_flow_systems[i - 1], + target_fs=segment_fs, + horizon=horizon, + nr_of_previous_values=nr_of_previous_values, + ) + segment_fs.build_model() + if i == 0: + self._check_no_investments(segment_fs) + segment_fs.solve(solver) + + segment_flow_systems.append(segment_fs) + + finally: + progress_bar.close() + + # Combine segment solutions + logger.info('Combining segment solutions...') + self._finalize_solution(segment_flow_systems, horizon) + + logger.info(f'Rolling horizon optimization completed: {n_segments} segments solved.') + + return segment_flow_systems + + def _calculate_segment_indices(self, total_timesteps: int, horizon: int, overlap: int) -> list[tuple[int, int]]: + """Calculate start and end indices for each segment.""" + segments = [] + start = 0 + while start < total_timesteps: + end = min(start + horizon + overlap, total_timesteps) + segments.append((start, end)) + start += horizon # Move by horizon (not horizon + overlap) + if end == total_timesteps: + break + return segments + + def _transfer_state( + self, + source_fs: FlowSystem, + target_fs: FlowSystem, + horizon: int, + nr_of_previous_values: int, + ) -> None: + """Transfer final state from source segment to target segment. 
+ + Transfers: + - Flow previous_flow_rate: Last nr_of_previous_values from non-overlap portion + - Storage initial_charge_state: Charge state at end of non-overlap portion + """ + from .components import Storage + + solution = source_fs.solution + time_slice = slice(horizon - nr_of_previous_values, horizon) + + # Transfer flow rates (for uptime/downtime tracking) + for label, target_flow in target_fs.flows.items(): + var_name = f'{label}|flow_rate' + if var_name in solution: + values = solution[var_name].isel(time=time_slice).values + target_flow.previous_flow_rate = values.item() if values.size == 1 else values + + # Transfer storage charge states + for label, target_comp in target_fs.components.items(): + if isinstance(target_comp, Storage): + var_name = f'{label}|charge_state' + if var_name in solution: + target_comp.initial_charge_state = solution[var_name].isel(time=horizon).item() + + def _check_no_investments(self, segment_fs: FlowSystem) -> None: + """Check that no InvestParameters are used (not supported in rolling horizon).""" + from .features import InvestmentModel + + invest_elements = [] + for component in segment_fs.components.values(): + for model in component.submodel.all_submodels: + if isinstance(model, InvestmentModel): + invest_elements.append(model.label_full) + + if invest_elements: + raise ValueError( + f'InvestParameters are not supported in rolling horizon optimization. ' + f'Found InvestmentModels: {invest_elements}. ' + f'Use standard optimize() for problems with investments.' 
+ ) + + def _finalize_solution( + self, + segment_flow_systems: list[FlowSystem], + horizon: int, + ) -> None: + """Combine segment solutions and compute derived values directly (no re-solve).""" + # Combine all solution variables from segments + combined_solution = self._combine_solutions(segment_flow_systems, horizon) + + # Assign combined solution to the original FlowSystem + self._fs._solution = combined_solution + + def _combine_solutions( + self, + segment_flow_systems: list[FlowSystem], + horizon: int, + ) -> xr.Dataset: + """Combine segment solutions into a single Dataset. + + - Time-dependent variables: concatenated with overlap trimming + - Effect temporal/total: recomputed from per-timestep values + - Other scalars (including periodic): NaN (not meaningful for rolling horizon) + """ + if not segment_flow_systems: + raise ValueError('No segments to combine.') + + effect_labels = set(self._fs.effects.keys()) + combined_vars: dict[str, xr.DataArray] = {} + first_solution = segment_flow_systems[0].solution + + # Step 1: Time-dependent → concatenate; Scalars → NaN + for var_name, first_var in first_solution.data_vars.items(): + if 'time' in first_var.dims: + arrays = [ + seg.solution[var_name].isel( + time=slice(None, horizon if i < len(segment_flow_systems) - 1 else None) + ) + for i, seg in enumerate(segment_flow_systems) + ] + combined_vars[var_name] = xr.concat(arrays, dim='time') + else: + combined_vars[var_name] = xr.DataArray(float('nan')) + + # Step 2: Recompute effect totals from per-timestep values + for effect in effect_labels: + per_ts = f'{effect}(temporal)|per_timestep' + if per_ts in combined_vars: + temporal_sum = combined_vars[per_ts].sum(dim='time', skipna=True) + combined_vars[f'{effect}(temporal)'] = temporal_sum + combined_vars[effect] = temporal_sum # Total = temporal (periodic is NaN/unsupported) + + return xr.Dataset(combined_vars) diff --git a/mkdocs.yml b/mkdocs.yml index 186e109fd..551fac523 100644 --- a/mkdocs.yml +++ b/mkdocs.yml 
@@ -69,7 +69,8 @@ nav: - Piecewise Effects: notebooks/06c-piecewise-effects.ipynb - Scaling: - Scenarios: notebooks/07-scenarios-and-periods.ipynb - - Large-Scale: notebooks/08-large-scale-optimization.ipynb + - Aggregation: notebooks/08a-aggregation.ipynb + - Rolling Horizon: notebooks/08b-rolling-horizon.ipynb - Results: - Plotting: notebooks/09-plotting-and-data-access.ipynb From 70f670c20009b24b240187adb854ad8bd8ef8219 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 21:28:33 +0100 Subject: [PATCH 42/49] Typo in CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f2eb344c..63804b551 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -238,7 +238,7 @@ costs = results.model['costs'].solution.item() # New (v5.0) flow_system.optimize(solver) -costs = flow_system.solution['costs(total)'].item() +costs = flow_system.solution['costs'].item() ``` **Renamed `OnOffParameters` → `StatusParameters`**: Complete terminology update to align with industry standards (PyPSA, unit commitment). This is a clean breaking change with no backwards compatibility wrapper. 
From 3bcd81ad8ccf00c55ac5efaa27f3a7ccf2486e75 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 21:45:22 +0100 Subject: [PATCH 43/49] Add storage cache --- flixopt/flow_system.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index a756b646a..adbd0fdaf 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -210,6 +210,7 @@ def __init__( self._network_app = None self._flows_cache: ElementContainer[Flow] | None = None + self._storages_cache: ElementContainer[Storage] | None = None # Solution dataset - populated after optimization or loaded from file self._solution: xr.Dataset | None = None @@ -1702,6 +1703,7 @@ def _add_components(self, *components: Component) -> None: # Invalidate cache once after all additions if components: self._flows_cache = None + self._storages_cache = None def _add_buses(self, *buses: Bus): for new_bus in list(buses): @@ -1710,6 +1712,7 @@ def _add_buses(self, *buses: Bus): # Invalidate cache once after all additions if buses: self._flows_cache = None + self._storages_cache = None def _connect_network(self): """Connects the network of components and buses. Can be rerun without changes if no elements were added""" @@ -1817,9 +1820,11 @@ def storages(self) -> ElementContainer[Storage]: ElementContainer containing all Storage components in the FlowSystem, sorted by label for reproducibility. 
""" - storages = [c for c in self.components.values() if isinstance(c, Storage)] - storages = sorted(storages, key=lambda s: s.label_full.lower()) - return ElementContainer(storages, element_type_name='storages', truncate_repr=10) + if self._storages_cache is None: + storages = [c for c in self.components.values() if isinstance(c, Storage)] + storages = sorted(storages, key=lambda s: s.label_full.lower()) + self._storages_cache = ElementContainer(storages, element_type_name='storages', truncate_repr=10) + return self._storages_cache @property def coords(self) -> dict[FlowSystemDimensions, pd.Index]: From 3981c3c62c1aaef5649a4b2814723a4ba279498f Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 22:01:38 +0100 Subject: [PATCH 44/49] invaliudate carrier cache --- flixopt/flow_system.py | 1 + 1 file changed, 1 insertion(+) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index adbd0fdaf..a3dff9c96 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1391,6 +1391,7 @@ def _invalidate_model(self) -> None: self.model = None self._connected_and_transformed = False self._topology = None # Invalidate topology accessor (and its cached colors) + self._flow_carriers = None # Invalidate flow-to-carrier mapping for element in self.values(): element.submodel = None element._variable_names = [] From 4133f91a9d9a670d0958f76465c19283ddbfd8c8 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 22:21:47 +0100 Subject: [PATCH 45/49] Improve robustness --- flixopt/flow_system.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index a3dff9c96..2df65b8db 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1028,12 +1028,17 @@ def connect_and_transform(self): def _register_missing_carriers(self) -> None: """Auto-register carriers from CONFIG for buses 
that reference unregistered carriers.""" for bus in self.buses.values(): - if bus.carrier and bus.carrier not in self._carriers: - # Try to get from CONFIG defaults - default_carrier = getattr(CONFIG.Carriers, bus.carrier, None) + if not bus.carrier: + continue + carrier_key = bus.carrier.lower() + if carrier_key not in self._carriers: + # Try to get from CONFIG defaults (try original case first, then lowercase) + default_carrier = getattr(CONFIG.Carriers, bus.carrier, None) or getattr( + CONFIG.Carriers, carrier_key, None + ) if default_carrier is not None: - self._carriers[bus.carrier] = default_carrier - logger.debug(f"Auto-registered carrier '{bus.carrier}' from CONFIG") + self._carriers[carrier_key] = default_carrier + logger.debug(f"Auto-registered carrier '{carrier_key}' from CONFIG") def _assign_element_colors(self) -> None: """Auto-assign colors to elements that don't have explicit colors set. @@ -1318,7 +1323,7 @@ def solve(self, solver: _Solver) -> FlowSystem: **solver.options, ) - if 'infeasible' in self.model.termination_condition: + if self.model.termination_condition in ('infeasible', 'infeasible_or_unbounded'): if CONFIG.Solving.compute_infeasibilities: import io from contextlib import redirect_stdout From 68992eb9e9a5cf4fa1f0d3040528f13875cdf93f Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 22:24:29 +0100 Subject: [PATCH 46/49] Added include_solution parameter to to_dataset() --- flixopt/flow_system.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 2df65b8db..5059d1917 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -577,15 +577,20 @@ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: return reference_structure, all_extracted_arrays - def to_dataset(self) -> xr.Dataset: + def to_dataset(self, include_solution: bool = True) -> 
xr.Dataset: """ Convert the FlowSystem to an xarray Dataset. Ensures FlowSystem is connected before serialization. - If a solution is present, it will be included in the dataset with variable names - prefixed by 'solution|' to avoid conflicts with FlowSystem configuration variables. - Solution time coordinates are renamed to 'solution_time' to preserve them - independently of the FlowSystem's time coordinates. + If a solution is present and `include_solution=True`, it will be included + in the dataset with variable names prefixed by 'solution|' to avoid conflicts + with FlowSystem configuration variables. Solution time coordinates are renamed + to 'solution_time' to preserve them independently of the FlowSystem's time coordinates. + + Args: + include_solution: Whether to include the optimization solution in the dataset. + Defaults to True. Set to False to get only the FlowSystem structure + without solution data (useful for copying or saving templates). Returns: xr.Dataset: Dataset containing all DataArrays with structure in attributes @@ -596,8 +601,8 @@ def to_dataset(self) -> xr.Dataset: ds = super().to_dataset() - # Include solution data if present - if self.solution is not None: + # Include solution data if present and requested + if include_solution and self.solution is not None: # Rename 'time' to 'solution_time' in solution variables to preserve full solution # (linopy solution may have extra timesteps, e.g., for final charge states) solution_renamed = ( @@ -868,17 +873,8 @@ def copy(self) -> FlowSystem: >>> variant.add_elements(new_component) >>> variant.optimize(solver) """ - # Temporarily clear solution to use standard serialization without solution data - original_solution = self._solution - self._solution = None - try: - ds = self.to_dataset() - finally: - self._solution = original_solution - - # Create new FlowSystem from dataset (without solution) - new_fs = FlowSystem.from_dataset(ds.copy(deep=True)) - return new_fs + ds = 
self.to_dataset(include_solution=False) + return FlowSystem.from_dataset(ds.copy(deep=True)) def __copy__(self): """Support for copy.copy().""" From 002a3370ed4171af2c94c8618e04c36b8e48fdac Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 22:31:20 +0100 Subject: [PATCH 47/49] Add flixopt_version to attrs in IO --- flixopt/flow_system.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 5059d1917..c6c21bb2a 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -15,6 +15,7 @@ import pandas as pd import xarray as xr +from . import __version__ from . import io as fx_io from .components import Storage from .config import CONFIG, DEPRECATION_REMOVAL_VERSION @@ -626,6 +627,9 @@ def to_dataset(self, include_solution: bool = True) -> xr.Dataset: carriers_structure[name] = carrier_ref ds.attrs['carriers'] = json.dumps(carriers_structure) + # Add version info + ds.attrs['flixopt_version'] = __version__ + return ds @classmethod From 5d17f9a098180813d349ae7405cbc1c1f61263e9 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 13 Dec 2025 23:48:44 +0100 Subject: [PATCH 48/49] Typos for better clarity --- flixopt/flow_system.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index c6c21bb2a..8f2dba51b 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1050,7 +1050,8 @@ def _assign_element_colors(self) -> None: from .color_processing import process_colors # Collect elements without colors (components only - buses use carrier colors) - elements_without_colors = [comp.label for comp in self.components.values() if comp.color is None] + # Use label_full for consistent keying with ElementContainer + elements_without_colors = [comp.label_full for comp in self.components.values() if comp.color is None] if not 
elements_without_colors: return @@ -1060,9 +1061,9 @@ def _assign_element_colors(self) -> None: color_mapping = process_colors(colorscale, elements_without_colors) # Assign colors to elements - for label, color in color_mapping.items(): - self.components[label].color = color - logger.debug(f"Auto-assigned color '{color}' to component '{label}'") + for label_full, color in color_mapping.items(): + self.components[label_full].color = color + logger.debug(f"Auto-assigned color '{color}' to component '{label_full}'") def add_elements(self, *elements: Element) -> None: """ @@ -1281,7 +1282,12 @@ def _add_clustering_constraints(self) -> None: """Add clustering constraints to the model.""" from .clustering import ClusteringModel - info = self._clustering_info + info = self._clustering_info or {} + required_keys = {'parameters', 'clustering', 'components_to_clusterize'} + missing_keys = required_keys - set(info) + if missing_keys: + raise KeyError(f'_clustering_info missing required keys: {sorted(missing_keys)}') + clustering_model = ClusteringModel( model=self.model, clustering_parameters=info['parameters'], From 78969bddf2619670fd5606bfbfa93789895d8c01 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sun, 14 Dec 2025 16:10:16 +0100 Subject: [PATCH 49/49] drop python 3.10 support and add 3.14 (#529) * drop python 3.10 support and add 3.14 * Exclude gurobi from py314 * scipy==1.16.3 in dev for python 3.14 * Fix tests without gurobi installed (py314) --- .github/workflows/tests.yaml | 2 +- pyproject.toml | 15 ++++++--------- tests/deprecated/test_scenarios.py | 6 ++++++ tests/test_scenarios.py | 6 ++++++ 4 files changed, 19 insertions(+), 10 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 5fa245f49..e8713993b 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -45,7 +45,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.10', '3.11', '3.12', 
'3.13'] + python-version: ['3.11', '3.12', '3.13', '3.14'] steps: - uses: actions/checkout@v5 diff --git a/pyproject.toml b/pyproject.toml index 88691e468..c178c428b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ name = "flixopt" dynamic = ["version"] description = "Progressive flow system optimization in Python - start simple, scale to complex." readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.11" license = "MIT" authors = [ { name = "Chair of Building Energy Systems and Heat Supply, TU Dresden", email = "peter.stange@tu-dresden.de" }, @@ -22,10 +22,10 @@ maintainers = [ keywords = ["optimization", "energy systems", "numerical analysis"] classifiers = [ "Development Status :: 4 - Beta", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering", @@ -42,14 +42,11 @@ dependencies = [ "pyyaml >= 6.0.0, < 7", "colorlog >= 6.8.0, < 7", "tqdm >= 4.66.0, < 5", - "tomli >= 2.0.1, < 3; python_version < '3.11'", # Only needed with python 3.10 or earlier # Default solver "highspy >= 1.5.3, < 2", # Visualization "matplotlib >= 3.5.2, < 4", "plotly >= 5.15.0, < 7", - # Fix for numexpr compatibility issue with numpy 1.26.4 on Python 3.10 - "numexpr >= 2.8.4, < 2.14; python_version < '3.11'", # Avoid 2.14.0 on older Python ] [project.optional-dependencies] @@ -68,7 +65,7 @@ full = [ "pyvis==0.3.2", # Visualizing FlowSystem Network "tsam >= 2.3.1, < 3", # Time series aggregation "scipy >= 1.15.1, < 2", # Used by tsam. Prior versions have conflict with highspy. 
See https://github.com/scipy/scipy/issues/22257 - "gurobipy >= 10.0.0, < 13", + "gurobipy >= 10.0.0, < 14; python_version < '3.14'", # No Python 3.14 wheels yet (expected Q1 2026) "dash >= 3.0.0, < 4", # Visualizing FlowSystem Network as app "dash-cytoscape >= 1.0.0, < 2", # Visualizing FlowSystem Network as app "dash-daq >= 0.6.0, < 1", # Visualizing FlowSystem Network as app @@ -86,8 +83,8 @@ dev = [ "pre-commit==4.3.0", "pyvis==0.3.2", "tsam==2.3.9", - "scipy==1.15.1", - "gurobipy==12.0.3", + "scipy==1.16.3", # 1.16.1+ required for Python 3.14 wheels + "gurobipy==12.0.3; python_version < '3.14'", # No Python 3.14 wheels yet "dash==3.3.0", "dash-cytoscape==1.0.2", "dash-daq==0.6.0", @@ -134,7 +131,7 @@ include-package-data = true version_scheme = "post-release" [tool.ruff] -target-version = "py310" # Adjust to your minimum version +target-version = "py311" # Minimum supported version # Files or directories to exclude (e.g., virtual environments, cache, build artifacts) exclude = [ "venv", # Virtual environments diff --git a/tests/deprecated/test_scenarios.py b/tests/deprecated/test_scenarios.py index b4a1cd161..65ea62d81 100644 --- a/tests/deprecated/test_scenarios.py +++ b/tests/deprecated/test_scenarios.py @@ -1,3 +1,5 @@ +import importlib.util + import numpy as np import pandas as pd import pytest @@ -11,6 +13,8 @@ from .conftest import create_linopy_model +GUROBI_AVAILABLE = importlib.util.find_spec('gurobipy') is not None + @pytest.fixture def test_system(): @@ -289,6 +293,7 @@ def test_scenario_dimensions_in_variables(flow_system_piecewise_conversion_scena assert model.variables[var].dims in [('time', 'scenario'), ('scenario',), ()] +@pytest.mark.skipif(not GUROBI_AVAILABLE, reason='Gurobi solver not installed') def test_full_scenario_optimization(flow_system_piecewise_conversion_scenarios): """Test a full optimization with scenarios and verify results.""" scenarios = flow_system_piecewise_conversion_scenarios.scenarios @@ -325,6 +330,7 @@ def 
test_io_persistence(flow_system_piecewise_conversion_scenarios, tmp_path): np.testing.assert_allclose(original_objective, flow_system_2.solution['objective'].item(), rtol=0.001) +@pytest.mark.skipif(not GUROBI_AVAILABLE, reason='Gurobi solver not installed') def test_scenarios_selection(flow_system_piecewise_conversion_scenarios): """Test scenario selection/subsetting functionality.""" flow_system_full = flow_system_piecewise_conversion_scenarios diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index b4a1cd161..65ea62d81 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -1,3 +1,5 @@ +import importlib.util + import numpy as np import pandas as pd import pytest @@ -11,6 +13,8 @@ from .conftest import create_linopy_model +GUROBI_AVAILABLE = importlib.util.find_spec('gurobipy') is not None + @pytest.fixture def test_system(): @@ -289,6 +293,7 @@ def test_scenario_dimensions_in_variables(flow_system_piecewise_conversion_scena assert model.variables[var].dims in [('time', 'scenario'), ('scenario',), ()] +@pytest.mark.skipif(not GUROBI_AVAILABLE, reason='Gurobi solver not installed') def test_full_scenario_optimization(flow_system_piecewise_conversion_scenarios): """Test a full optimization with scenarios and verify results.""" scenarios = flow_system_piecewise_conversion_scenarios.scenarios @@ -325,6 +330,7 @@ def test_io_persistence(flow_system_piecewise_conversion_scenarios, tmp_path): np.testing.assert_allclose(original_objective, flow_system_2.solution['objective'].item(), rtol=0.001) +@pytest.mark.skipif(not GUROBI_AVAILABLE, reason='Gurobi solver not installed') def test_scenarios_selection(flow_system_piecewise_conversion_scenarios): """Test scenario selection/subsetting functionality.""" flow_system_full = flow_system_piecewise_conversion_scenarios