From 87bd4227e25dd525595ba52088c9fdb9e3072ff8 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 4 Nov 2025 12:14:48 +0100 Subject: [PATCH 01/35] Feature/speed up resample (#455) --- CHANGELOG.md | 1 + flixopt/flow_system.py | 94 +++++++++++++++++++++++++++++++++--------- 2 files changed, 75 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f78503cf2..9cbdcfdfb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -60,6 +60,7 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ### 💥 Breaking Changes ### ♻️ Changed +- Greatly sped up the resampling of a FlowSystem (x20 - x40) by converting to dataarray internally ### 🗑️ Deprecated diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 9bc7f7f99..760addc22 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -6,6 +6,7 @@ import logging import warnings +from collections import defaultdict from itertools import chain from typing import TYPE_CHECKING, Any, Literal, Optional @@ -982,6 +983,65 @@ def isel( selected_dataset = ds.isel(**indexers) return self.__class__.from_dataset(selected_dataset) + def _resample_by_dimension_groups( + self, + time_dataset: xr.Dataset, + time: str, + method: str, + **kwargs: Any, + ) -> xr.Dataset: + """ + Resample variables grouped by their dimension structure to avoid broadcasting. + + This method groups variables by their non-time dimensions before resampling, + which provides two key benefits: + + 1. **Performance**: Resampling many variables with the same dimensions together + is significantly faster than resampling each variable individually. + + 2. **Safety**: Prevents xarray from broadcasting variables with different + dimensions into a larger dimensional space filled with NaNs, which would + cause memory bloat and computational inefficiency. 
+ + Example: + Without grouping (problematic): + var1: (time, location, tech) shape (8000, 10, 2) + var2: (time, region) shape (8000, 5) + concat → (variable, time, location, tech, region) ← Unwanted broadcasting! + + With grouping (safe and fast): + Group 1: [var1, var3, ...] with dims (time, location, tech) + Group 2: [var2, var4, ...] with dims (time, region) + Each group resampled separately → No broadcasting, optimal performance! + + Args: + time_dataset: Dataset containing only variables with time dimension + time: Resampling frequency (e.g., '2h', '1D', '1M') + method: Resampling method name (e.g., 'mean', 'sum', 'first') + **kwargs: Additional arguments passed to xarray.resample() + + Returns: + Resampled dataset with original dimension structure preserved + """ + # Group variables by dimensions (excluding time) + dim_groups = defaultdict(list) + for var_name, var in time_dataset.data_vars.items(): + dims_key = tuple(sorted(d for d in var.dims if d != 'time')) + dim_groups[dims_key].append(var_name) + + # Handle empty case: no time-dependent variables + if not dim_groups: + return getattr(time_dataset.resample(time=time, **kwargs), method)() + + # Resample each group separately + resampled_groups = [] + for var_names in dim_groups.values(): + grouped_dataset = time_dataset[var_names] + resampled_group = getattr(grouped_dataset.resample(time=time, **kwargs), method)() + resampled_groups.append(resampled_group) + + return xr.merge(resampled_groups) + def resample( self, time: str, @@ -1007,34 +1067,28 @@ def resample( if not self.connected_and_transformed: self.connect_and_transform() - dataset = self.to_dataset() + # Validate method before resampling + available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] + if method not in available_methods: + raise ValueError(f'Unsupported resampling method: {method}. 
Available: {available_methods}') - # Separate variables with and without time dimension - time_vars = {} - non_time_vars = {} + dataset = self.to_dataset() - for var_name, var in dataset.data_vars.items(): - if 'time' in var.dims: - time_vars[var_name] = var - else: - non_time_vars[var_name] = var + time_var_names = [v for v in dataset.data_vars if 'time' in dataset[v].dims] + non_time_var_names = [v for v in dataset.data_vars if v not in time_var_names] # Only resample variables that have time dimension - time_dataset = dataset[list(time_vars.keys())] - resampler = time_dataset.resample(time=time, **kwargs) + time_dataset = dataset[time_var_names] - if hasattr(resampler, method): - resampled_time_data = getattr(resampler, method)() - else: - available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] - raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}') + # Resample with dimension grouping to avoid broadcasting + resampled_time_dataset = self._resample_by_dimension_groups(time_dataset, time, method, **kwargs) # Combine resampled time variables with non-time variables - if non_time_vars: - non_time_dataset = dataset[list(non_time_vars.keys())] - resampled_dataset = xr.merge([resampled_time_data, non_time_dataset]) + if non_time_var_names: + non_time_dataset = dataset[non_time_var_names] + resampled_dataset = xr.merge([resampled_time_dataset, non_time_dataset]) else: - resampled_dataset = resampled_time_data + resampled_dataset = resampled_time_dataset # Let FlowSystem recalculate or use explicitly set value resampled_dataset.attrs['hours_of_last_timestep'] = hours_of_last_timestep From efb85322da528c52a366e3e0bcd1d3c08d2678b2 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 4 Nov 2025 13:26:22 +0100 Subject: [PATCH 02/35] Update CHANGELOG.md --- CHANGELOG.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 9cbdcfdfb..5fd7143cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -60,7 +60,6 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ### 💥 Breaking Changes ### ♻️ Changed -- Greatly sped up the resampling of a FlowSystem (x20 - x40) by converting to dataarray internally ### 🗑️ Deprecated @@ -82,6 +81,17 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp Until here --> +## [3.4.1] - 2025-11-04 + +**Summary**: Speed up resampling by 20-40 times. + +If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/). + +### ♻️ Changed +- Greatly sped up the resampling of a FlowSystem (x20 - x40) by converting to dataarray internally + +--- + ## [3.4.0] - 2025-11-01 **Summary**: Enhanced solver configuration with new CONFIG.Solving section for centralized solver parameter management. From fe3fe2345e53c80c740e942007a813c916fd0385 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Wed, 5 Nov 2025 08:39:29 +0100 Subject: [PATCH 03/35] Feature/compact repr (#457) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add truncation to repr of containers * Add truncation to results.py * Changes Made 1. ContainerMixin in structure.py - Changed truncate_repr from bool to int | None - None = show all items (no truncation) - Integer value = maximum number of items to show in repr - Default is None (no truncation) 2. Updated _get_repr() method - Simplified parameters from (truncate: bool, max_items: int) to just (max_items: int | None) - Uses instance's _truncate_repr as default if max_items is not provided - Cleaner logic: truncates only when limit is not None and limit > 0 3. 
Updated __repr__() method - Now simply calls self._get_repr() without arguments - Respects the instance's truncate_repr setting 4. Simplified _format_grouped_containers() - Removed the conditional logic checking for _truncate_repr - Now just calls repr(container) which automatically respects each container's setting 5. Updated all call sites - flow_system.py: Changed truncate_repr=True → truncate_repr=10 (4 locations) - results.py: Changed truncate_repr=True → truncate_repr=10 (4 locations) - effects.py: Changed truncate_repr: bool = False → truncate_repr: int | None = None and added docstring * Update CHANGELOG.md --- CHANGELOG.md | 1 + flixopt/effects.py | 11 +++++++++-- flixopt/flow_system.py | 10 ++++++---- flixopt/results.py | 10 ++++++---- flixopt/structure.py | 37 +++++++++++++++++++++++++++++++++---- 5 files changed, 55 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5fd7143cc..27eeef2fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -60,6 +60,7 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp ### 💥 Breaking Changes ### ♻️ Changed +- Truncate repr of FlowSystem and CalculationResults to only show the first 10 items of each category ### 🗑️ Deprecated diff --git a/flixopt/effects.py b/flixopt/effects.py index 8d8efbf4c..ddf8eadeb 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -458,8 +458,15 @@ class EffectCollection(ElementContainer[Effect]): submodel: EffectCollectionModel | None - def __init__(self, *effects: Effect): - super().__init__(element_type_name='effects') + def __init__(self, *effects: Effect, truncate_repr: int | None = None): + """ + Initialize the EffectCollection. + + Args: + *effects: Effects to register in the collection. + truncate_repr: Maximum number of items to show in repr. If None, show all items. 
Default: None + """ + super().__init__(element_type_name='effects', truncate_repr=truncate_repr) self._standard_effect: Effect | None = None self._objective_effect: Effect | None = None diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 760addc22..aaa3a3a99 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -175,9 +175,11 @@ def __init__( self.hours_per_timestep = self.fit_to_model_coords('hours_per_timestep', hours_per_timestep) # Element collections - self.components: ElementContainer[Component] = ElementContainer(element_type_name='components') - self.buses: ElementContainer[Bus] = ElementContainer(element_type_name='buses') - self.effects: EffectCollection = EffectCollection() + self.components: ElementContainer[Component] = ElementContainer( + element_type_name='components', truncate_repr=10 + ) + self.buses: ElementContainer[Bus] = ElementContainer(element_type_name='buses', truncate_repr=10) + self.effects: EffectCollection = EffectCollection(truncate_repr=10) self.model: FlowSystemModel | None = None self._connected_and_transformed = False @@ -799,7 +801,7 @@ def flows(self) -> ElementContainer[Flow]: flows = [f for c in self.components.values() for f in c.inputs + c.outputs] # Deduplicate by id and sort for reproducibility flows = sorted({id(f): f for f in flows}.values(), key=lambda f: f.label_full.lower()) - self._flows_cache = ElementContainer(flows, element_type_name='flows') + self._flows_cache = ElementContainer(flows, element_type_name='flows', truncate_repr=10) return self._flows_cache @property diff --git a/flixopt/results.py b/flixopt/results.py index 954af6669..3d9aedf62 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -246,13 +246,15 @@ def __init__( components_dict = { label: ComponentResults(self, **infos) for label, infos in self.solution.attrs['Components'].items() } - self.components = ResultsContainer(elements=components_dict, element_type_name='component results') + self.components = 
ResultsContainer( + elements=components_dict, element_type_name='component results', truncate_repr=10 + ) buses_dict = {label: BusResults(self, **infos) for label, infos in self.solution.attrs['Buses'].items()} - self.buses = ResultsContainer(elements=buses_dict, element_type_name='bus results') + self.buses = ResultsContainer(elements=buses_dict, element_type_name='bus results', truncate_repr=10) effects_dict = {label: EffectResults(self, **infos) for label, infos in self.solution.attrs['Effects'].items()} - self.effects = ResultsContainer(elements=effects_dict, element_type_name='effect results') + self.effects = ResultsContainer(elements=effects_dict, element_type_name='effect results', truncate_repr=10) if 'Flows' not in self.solution.attrs: warnings.warn( @@ -267,7 +269,7 @@ def __init__( label: FlowResults(self, **infos) for label, infos in self.solution.attrs.get('Flows', {}).items() } self._has_flow_data = True - self.flows = ResultsContainer(elements=flows_dict, element_type_name='flow results') + self.flows = ResultsContainer(elements=flows_dict, element_type_name='flow results', truncate_repr=10) self.timesteps_extra = self.solution.indexes['time'] self.hours_per_timestep = FlowSystem.calculate_hours_per_timestep(self.timesteps_extra) diff --git a/flixopt/structure.py b/flixopt/structure.py index e2aa6ee87..2bce6aa52 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -921,14 +921,17 @@ def __init__( self, elements: list[T] | dict[str, T] | None = None, element_type_name: str = 'elements', + truncate_repr: int | None = None, ): """ Args: elements: Initial elements to add (list or dict) element_type_name: Name for display (e.g., 'components', 'buses') + truncate_repr: Maximum number of items to show in repr. If None, show all items. 
Default: None """ super().__init__() self._element_type_name = element_type_name + self._truncate_repr = truncate_repr if elements is not None: if isinstance(elements, dict): @@ -999,8 +1002,20 @@ def __getitem__(self, label: str) -> T: error_msg += f' Available: {", ".join(available[:5])} ... (+{len(available) - 5} more)' raise KeyError(error_msg) from None - def __repr__(self) -> str: - """Return a string representation similar to linopy.model.Variables.""" + def _get_repr(self, max_items: int | None = None) -> str: + """ + Get string representation with optional truncation. + + Args: + max_items: Maximum number of items to show. If None, uses instance default (self._truncate_repr). + If still None, shows all items. + + Returns: + Formatted string representation + """ + # Use provided max_items, or fall back to instance default + limit = max_items if max_items is not None else self._truncate_repr + count = len(self) title = f'{self._element_type_name.capitalize()} ({count} item{"s" if count != 1 else ""})' @@ -1009,11 +1024,24 @@ def __repr__(self) -> str: r += '\n' else: r = fx_io.format_title_with_underline(title) - for name in sorted(self.keys(), key=_natural_sort_key): - r += f' * {name}\n' + sorted_names = sorted(self.keys(), key=_natural_sort_key) + + if limit is not None and limit > 0 and len(sorted_names) > limit: + # Show truncated list + for name in sorted_names[:limit]: + r += f' * {name}\n' + r += f' ... 
(+{len(sorted_names) - limit} more)\n' + else: + # Show all items + for name in sorted_names: + r += f' * {name}\n' return r + def __repr__(self) -> str: + """Return a string representation using the instance's truncate_repr setting.""" + return self._get_repr() + class ElementContainer(ContainerMixin[T]): """ @@ -1215,6 +1243,7 @@ def _format_grouped_containers(self, title: str | None = None) -> str: if container: # Only show non-empty groups if parts: # Add spacing between sections parts.append('') + # Use container's __repr__ which respects its truncate_repr setting parts.append(repr(container).rstrip('\n')) return '\n'.join(parts) From 275cd6ae21b546fc69d9ce06a6e3c939e2ce6db7 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Thu, 6 Nov 2025 19:21:54 +0100 Subject: [PATCH 04/35] Feature/speed up resample again (#458) * Resample a single concatenated dataarray instead of a Dataset * Performance improvements * Use helper method for resampling speed up resampling * Improve docstring * Improve docstring * avoiding attribute conflicts and empty merge errors * moving method validation earlier for fail-fast behavior * Update CHANGELOG.md * Add new combined select and resample method * Remove code duplication * Add benchmark * Improve becnhmark * Improve becnhmark * Add power user chaining options * Remove becnhmark * Use dask chunking in resample * Make the new methods class methods * Update benchmark_bottleneck.py * Use dataframe based approach * registry pattern * registry pattern * Improve benchmark * Benchmark datarray version * Use dataarray conversion before resampling * Benchmark dask speedup * Add dask chunking * Remove dask chunking due to negligible improvements * Remove benchmark_bottleneck.py * Update CHANGELOG.md * Make ._dataset_... 
methods self contained, handling time index stuff directly * Use helper method * further deduplication and consistency improvements * improve docstrings * ruff format * fixed the metadata preservation issue in flow_system.py:352-369 * Typo * Add test * Speed up tests * ruff format * Improve tests * Linting fixes * Fix tests --- CHANGELOG.md | 2 + flixopt/flow_system.py | 378 ++++++++++++++++++++++++----- tests/test_resample_equivalence.py | 310 +++++++++++++++++++++++ 3 files changed, 625 insertions(+), 65 deletions(-) create mode 100644 tests/test_resample_equivalence.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 27eeef2fb..13cb24f38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,11 +56,13 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/). ### ✨ Added +- Added options to resample and select subsets of the flowsystems without converting to and from Dataset each time. Use the new methods `FlowSystem._dataset_resample()`, `FlowSystem._dataset_sel()` and `FlowSystem._dataset_isel()`. All of them expect and return a dataset. ### 💥 Breaking Changes ### ♻️ Changed - Truncate repr of FlowSystem and CalculationResults to only show the first 10 items of each category +- Greatly sped up the resampling of a FlowSystem again ### 🗑️ Deprecated diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index aaa3a3a99..eeb35e6f2 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -58,11 +58,10 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): timesteps: The timesteps of the model. periods: The periods of the model. scenarios: The scenarios of the model. - hours_of_last_timestep: The duration of the last time step. 
Uses the last time interval if not specified - hours_of_previous_timesteps: The duration of previous timesteps. - If None, the first time increment of time_series is used. - This is needed to calculate previous durations (for example consecutive_on_hours). - If you use an array, take care that its long enough to cover all previous values! + hours_of_last_timestep: Duration of the last timestep. If None, computed from the last time interval. + hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the first time interval. + Can be a scalar (all previous timesteps have same duration) or array (different durations). + Used to calculate previous values (e.g., consecutive_on_hours). weights: The weights of each period and scenario. If None, all scenarios have the same weight (normalized to 1). Its recommended to normalize the weights to sum up to 1. scenario_independent_sizes: Controls whether investment sizes are equalized across scenarios. @@ -121,6 +120,22 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): ... print(f'{bus.label}') >>> >>> # Flows are automatically collected from all components + + Power user pattern - Efficient chaining without conversion overhead: + + >>> # Instead of chaining (causes multiple conversions): + >>> result = flow_system.sel(time='2020-01').resample('2h') # Slow + >>> + >>> # Use dataset methods directly (single conversion): + >>> ds = flow_system.to_dataset() + >>> ds = FlowSystem._dataset_sel(ds, time='2020-01') + >>> ds = flow_system._dataset_resample(ds, freq='2h', method='mean') + >>> result = FlowSystem.from_dataset(ds) # Fast! + >>> + >>> # Available dataset methods: + >>> # - FlowSystem._dataset_sel(dataset, time=..., period=..., scenario=...) + >>> # - FlowSystem._dataset_isel(dataset, time=..., period=..., scenario=...) + >>> # - flow_system._dataset_resample(dataset, freq=..., method=..., **kwargs) >>> for flow in flow_system.flows.values(): ... 
print(f'{flow.label_full}: {flow.size}') >>> @@ -158,20 +173,20 @@ def __init__( scenario_independent_flow_rates: bool | list[str] = False, ): self.timesteps = self._validate_timesteps(timesteps) - self.timesteps_extra = self._create_timesteps_with_extra(self.timesteps, hours_of_last_timestep) - self.hours_of_previous_timesteps = self._calculate_hours_of_previous_timesteps( - self.timesteps, hours_of_previous_timesteps - ) + + # Compute all time-related metadata using shared helper + ( + self.timesteps_extra, + self.hours_of_last_timestep, + self.hours_of_previous_timesteps, + hours_per_timestep, + ) = self._compute_time_metadata(self.timesteps, hours_of_last_timestep, hours_of_previous_timesteps) self.periods = None if periods is None else self._validate_periods(periods) self.scenarios = None if scenarios is None else self._validate_scenarios(scenarios) self.weights = weights - hours_per_timestep = self.calculate_hours_per_timestep(self.timesteps_extra) - - self.hours_of_last_timestep = hours_per_timestep[-1].item() - self.hours_per_timestep = self.fit_to_model_coords('hours_per_timestep', hours_per_timestep) # Element collections @@ -274,6 +289,87 @@ def _calculate_hours_of_previous_timesteps( first_interval = timesteps[1] - timesteps[0] return first_interval.total_seconds() / 3600 # Convert to hours + @classmethod + def _compute_time_metadata( + cls, + timesteps: pd.DatetimeIndex, + hours_of_last_timestep: int | float | None = None, + hours_of_previous_timesteps: int | float | np.ndarray | None = None, + ) -> tuple[pd.DatetimeIndex, float, float | np.ndarray, xr.DataArray]: + """ + Compute all time-related metadata from timesteps. + + This is the single source of truth for time metadata computation, used by both + __init__ and dataset operations (sel/isel/resample) to ensure consistency. + + Args: + timesteps: The time index to compute metadata from + hours_of_last_timestep: Duration of the last timestep. If None, computed from the time index. 
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the time index. + Can be a scalar or array. + + Returns: + Tuple of (timesteps_extra, hours_of_last_timestep, hours_of_previous_timesteps, hours_per_timestep) + """ + # Create timesteps with extra step at the end + timesteps_extra = cls._create_timesteps_with_extra(timesteps, hours_of_last_timestep) + + # Calculate hours per timestep + hours_per_timestep = cls.calculate_hours_per_timestep(timesteps_extra) + + # Extract hours_of_last_timestep if not provided + if hours_of_last_timestep is None: + hours_of_last_timestep = hours_per_timestep.isel(time=-1).item() + + # Compute hours_of_previous_timesteps (handles both None and provided cases) + hours_of_previous_timesteps = cls._calculate_hours_of_previous_timesteps(timesteps, hours_of_previous_timesteps) + + return timesteps_extra, hours_of_last_timestep, hours_of_previous_timesteps, hours_per_timestep + + @classmethod + def _update_time_metadata( + cls, + dataset: xr.Dataset, + hours_of_last_timestep: int | float | None = None, + hours_of_previous_timesteps: int | float | np.ndarray | None = None, + ) -> xr.Dataset: + """ + Update time-related attributes and data variables in dataset based on its time index. + + Recomputes hours_of_last_timestep, hours_of_previous_timesteps, and hours_per_timestep + from the dataset's time index when these parameters are None. This ensures time metadata + stays synchronized with the actual timesteps after operations like resampling or selection. + + Args: + dataset: Dataset to update (will be modified in place) + hours_of_last_timestep: Duration of the last timestep. If None, computed from the time index. + hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the time index. + Can be a scalar or array. 
+ + Returns: + The same dataset with updated time-related attributes and data variables + """ + new_time_index = dataset.indexes.get('time') + if new_time_index is not None and len(new_time_index) >= 2: + # Use shared helper to compute all time metadata + _, hours_of_last_timestep, hours_of_previous_timesteps, hours_per_timestep = cls._compute_time_metadata( + new_time_index, hours_of_last_timestep, hours_of_previous_timesteps + ) + + # Update hours_per_timestep DataArray if it exists in the dataset + # This prevents stale data after resampling operations + if 'hours_per_timestep' in dataset.data_vars: + dataset['hours_per_timestep'] = hours_per_timestep + + # Update time-related attributes only when new values are provided/computed + # This preserves existing metadata instead of overwriting with None + if hours_of_last_timestep is not None: + dataset.attrs['hours_of_last_timestep'] = hours_of_last_timestep + if hours_of_previous_timesteps is not None: + dataset.attrs['hours_of_previous_timesteps'] = hours_of_previous_timesteps + + return dataset + def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: """ Override Interface method to handle FlowSystem-specific serialization. @@ -911,6 +1007,63 @@ def scenario_independent_flow_rates(self, value: bool | list[str]) -> None: self._validate_scenario_parameter(value, 'scenario_independent_flow_rates', 'Flow.label_full') self._scenario_independent_flow_rates = value + @classmethod + def _dataset_sel( + cls, + dataset: xr.Dataset, + time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None, + period: int | slice | list[int] | pd.Index | None = None, + scenario: str | slice | list[str] | pd.Index | None = None, + hours_of_last_timestep: int | float | None = None, + hours_of_previous_timesteps: int | float | np.ndarray | None = None, + ) -> xr.Dataset: + """ + Select subset of dataset by label (for power users to avoid conversion overhead). 
+ + This method operates directly on xarray Datasets, allowing power users to chain + operations efficiently without repeated FlowSystem conversions: + + Example: + # Power user pattern (single conversion): + >>> ds = flow_system.to_dataset() + >>> ds = FlowSystem._dataset_sel(ds, time='2020-01') + >>> ds = FlowSystem._dataset_resample(ds, freq='2h', method='mean') + >>> result = FlowSystem.from_dataset(ds) + + # vs. simple pattern (multiple conversions): + >>> result = flow_system.sel(time='2020-01').resample('2h') + + Args: + dataset: xarray Dataset from FlowSystem.to_dataset() + time: Time selection (e.g., '2020-01', slice('2020-01-01', '2020-06-30')) + period: Period selection (e.g., 2020, slice(2020, 2022)) + scenario: Scenario selection (e.g., 'Base Case', ['Base Case', 'High Demand']) + hours_of_last_timestep: Duration of the last timestep. If None, computed from the selected time index. + hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the selected time index. + Can be a scalar or array. + + Returns: + xr.Dataset: Selected dataset + """ + indexers = {} + if time is not None: + indexers['time'] = time + if period is not None: + indexers['period'] = period + if scenario is not None: + indexers['scenario'] = scenario + + if not indexers: + return dataset + + result = dataset.sel(**indexers) + + # Update time-related attributes if time was selected + if 'time' in indexers: + result = cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps) + + return result + def sel( self, time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None, @@ -918,22 +1071,56 @@ def sel( scenario: str | slice | list[str] | pd.Index | None = None, ) -> FlowSystem: """ - Select a subset of the flowsystem by the time coordinate. + Select a subset of the flowsystem by label. + + For power users: Use FlowSystem._dataset_sel() to chain operations on datasets + without conversion overhead. 
See _dataset_sel() documentation. Args: - time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15', or list of times) + time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15') period: Period selection (e.g., slice(2023, 2024), or list of periods) - scenario: Scenario selection (e.g., slice('scenario1', 'scenario2'), or list of scenarios) + scenario: Scenario selection (e.g., 'scenario1', or list of scenarios) Returns: FlowSystem: New FlowSystem with selected data """ + if time is None and period is None and scenario is None: + return self.copy() + if not self.connected_and_transformed: self.connect_and_transform() ds = self.to_dataset() + ds = self._dataset_sel(ds, time=time, period=period, scenario=scenario) + return self.__class__.from_dataset(ds) + + @classmethod + def _dataset_isel( + cls, + dataset: xr.Dataset, + time: int | slice | list[int] | None = None, + period: int | slice | list[int] | None = None, + scenario: int | slice | list[int] | None = None, + hours_of_last_timestep: int | float | None = None, + hours_of_previous_timesteps: int | float | np.ndarray | None = None, + ) -> xr.Dataset: + """ + Select subset of dataset by integer index (for power users to avoid conversion overhead). + + See _dataset_sel() for usage pattern. + + Args: + dataset: xarray Dataset from FlowSystem.to_dataset() + time: Time selection by index (e.g., slice(0, 100), [0, 5, 10]) + period: Period selection by index + scenario: Scenario selection by index + hours_of_last_timestep: Duration of the last timestep. If None, computed from the selected time index. + hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the selected time index. + Can be a scalar or array. 
- # Build indexers dict from non-None parameters + Returns: + xr.Dataset: Selected dataset + """ indexers = {} if time is not None: indexers['time'] = time @@ -943,10 +1130,15 @@ def sel( indexers['scenario'] = scenario if not indexers: - return self.copy() # Return a copy when no selection + return dataset + + result = dataset.isel(**indexers) - selected_dataset = ds.sel(**indexers) - return self.__class__.from_dataset(selected_dataset) + # Update time-related attributes if time was selected + if 'time' in indexers: + result = cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps) + + return result def isel( self, @@ -957,6 +1149,9 @@ def isel( """ Select a subset of the flowsystem by integer indices. + For power users: Use FlowSystem._dataset_isel() to chain operations on datasets + without conversion overhead. See _dataset_sel() documentation. + Args: time: Time selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10]) period: Period selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10]) @@ -965,28 +1160,19 @@ def isel( Returns: FlowSystem: New FlowSystem with selected data """ + if time is None and period is None and scenario is None: + return self.copy() + if not self.connected_and_transformed: self.connect_and_transform() ds = self.to_dataset() + ds = self._dataset_isel(ds, time=time, period=period, scenario=scenario) + return self.__class__.from_dataset(ds) - # Build indexers dict from non-None parameters - indexers = {} - if time is not None: - indexers['time'] = time - if period is not None: - indexers['period'] = period - if scenario is not None: - indexers['scenario'] = scenario - - if not indexers: - return self.copy() # Return a copy when no selection - - selected_dataset = ds.isel(**indexers) - return self.__class__.from_dataset(selected_dataset) - + @classmethod def _resample_by_dimension_groups( - self, + cls, time_dataset: xr.Dataset, time: str, method: str, @@ -1035,47 +1221,72 @@ def 
_resample_by_dimension_groups( if not dim_groups: return getattr(time_dataset.resample(time=time, **kwargs), method)() - # Resample each group separately + # Resample each group separately using DataArray concat (faster) resampled_groups = [] for var_names in dim_groups.values(): - grouped_dataset = time_dataset[var_names] - resampled_group = getattr(grouped_dataset.resample(time=time, **kwargs), method)() - resampled_groups.append(resampled_group) + # Skip empty groups + if not var_names: + continue + + # Concat variables into a single DataArray with 'variable' dimension + # Use combine_attrs='drop_conflicts' to handle attribute conflicts + stacked = xr.concat( + [time_dataset[name] for name in var_names], + dim=pd.Index(var_names, name='variable'), + combine_attrs='drop_conflicts', + ) - return xr.merge(resampled_groups) + # Resample the DataArray (faster than resampling Dataset) + resampled = getattr(stacked.resample(time=time, **kwargs), method)() - def resample( - self, - time: str, + # Convert back to Dataset using the 'variable' dimension + resampled_dataset = resampled.to_dataset(dim='variable') + resampled_groups.append(resampled_dataset) + + # Merge all resampled groups, handling empty list case + if not resampled_groups: + return time_dataset # Return empty dataset as-is + + if len(resampled_groups) == 1: + return resampled_groups[0] + + # Merge multiple groups with combine_attrs to avoid conflicts + return xr.merge(resampled_groups, combine_attrs='drop_conflicts') + + @classmethod + def _dataset_resample( + cls, + dataset: xr.Dataset, + freq: str, method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean', hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, **kwargs: Any, - ) -> FlowSystem: + ) -> xr.Dataset: """ - Create a resampled FlowSystem by resampling data along the time dimension (like xr.Dataset.resample()). 
- Only resamples data variables that have a time dimension. + Resample dataset along time dimension (for power users to avoid conversion overhead). + + Uses optimized _resample_by_dimension_groups() to avoid broadcasting issues. + See _dataset_sel() for usage pattern. Args: - time: Resampling frequency (e.g., '3h', '2D', '1M') - method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min' - hours_of_last_timestep: New duration of the last time step. Defaults to the last time interval of the new timesteps - hours_of_previous_timesteps: New duration of the previous timestep. Defaults to the first time increment of the new timesteps + dataset: xarray Dataset from FlowSystem.to_dataset() + freq: Resampling frequency (e.g., '2h', '1D', '1M') + method: Resampling method (e.g., 'mean', 'sum', 'first') + hours_of_last_timestep: Duration of the last timestep after resampling. If None, computed from the last time interval. + hours_of_previous_timesteps: Duration of previous timesteps after resampling. If None, computed from the first time interval. + Can be a scalar or array. **kwargs: Additional arguments passed to xarray.resample() Returns: - FlowSystem: New resampled FlowSystem + xr.Dataset: Resampled dataset """ - if not self.connected_and_transformed: - self.connect_and_transform() - - # Validate method before resampling + # Validate method available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] if method not in available_methods: raise ValueError(f'Unsupported resampling method: {method}. 
Available: {available_methods}') - dataset = self.to_dataset() - + # Separate time and non-time variables time_var_names = [v for v in dataset.data_vars if 'time' in dataset[v].dims] non_time_var_names = [v for v in dataset.data_vars if v not in time_var_names] @@ -1083,20 +1294,57 @@ def resample( time_dataset = dataset[time_var_names] # Resample with dimension grouping to avoid broadcasting - resampled_time_dataset = self._resample_by_dimension_groups(time_dataset, time, method, **kwargs) + resampled_time_dataset = cls._resample_by_dimension_groups(time_dataset, freq, method, **kwargs) # Combine resampled time variables with non-time variables if non_time_var_names: non_time_dataset = dataset[non_time_var_names] - resampled_dataset = xr.merge([resampled_time_dataset, non_time_dataset]) + result = xr.merge([resampled_time_dataset, non_time_dataset]) else: - resampled_dataset = resampled_time_dataset + result = resampled_time_dataset + + # Update time-related attributes based on new time index + return cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps) + + def resample( + self, + time: str, + method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean', + hours_of_last_timestep: int | float | None = None, + hours_of_previous_timesteps: int | float | np.ndarray | None = None, + **kwargs: Any, + ) -> FlowSystem: + """ + Create a resampled FlowSystem by resampling data along the time dimension (like xr.Dataset.resample()). + Only resamples data variables that have a time dimension. - # Let FlowSystem recalculate or use explicitly set value - resampled_dataset.attrs['hours_of_last_timestep'] = hours_of_last_timestep - resampled_dataset.attrs['hours_of_previous_timesteps'] = hours_of_previous_timesteps + For power users: Use FlowSystem._dataset_resample() to chain operations on datasets + without conversion overhead. See _dataset_sel() documentation. 
- return self.__class__.from_dataset(resampled_dataset) + Args: + time: Resampling frequency (e.g., '3h', '2D', '1M') + method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min' + hours_of_last_timestep: Duration of the last timestep after resampling. If None, computed from the last time interval. + hours_of_previous_timesteps: Duration of previous timesteps after resampling. If None, computed from the first time interval. + Can be a scalar or array. + **kwargs: Additional arguments passed to xarray.resample() + + Returns: + FlowSystem: New resampled FlowSystem + """ + if not self.connected_and_transformed: + self.connect_and_transform() + + ds = self.to_dataset() + ds = self._dataset_resample( + ds, + freq=time, + method=method, + hours_of_last_timestep=hours_of_last_timestep, + hours_of_previous_timesteps=hours_of_previous_timesteps, + **kwargs, + ) + return self.__class__.from_dataset(ds) @property def connected_and_transformed(self) -> bool: diff --git a/tests/test_resample_equivalence.py b/tests/test_resample_equivalence.py new file mode 100644 index 000000000..19144b6a1 --- /dev/null +++ b/tests/test_resample_equivalence.py @@ -0,0 +1,310 @@ +""" +Tests to ensure the dimension grouping optimization in _resample_by_dimension_groups +is equivalent to naive Dataset resampling. + +These tests verify that the optimization (grouping variables by dimensions before +resampling) produces identical results to simply calling Dataset.resample() directly. +""" + +import numpy as np +import pandas as pd +import pytest +import xarray as xr + +import flixopt as fx + + +def naive_dataset_resample(dataset: xr.Dataset, freq: str, method: str) -> xr.Dataset: + """ + Naive resampling: simply call Dataset.resample().method() directly. + + This is the straightforward approach without dimension grouping optimization. 
+ """ + return getattr(dataset.resample(time=freq), method)() + + +def create_dataset_with_mixed_dimensions(n_timesteps=48, seed=42): + """ + Create a dataset with variables having different dimension structures. + + This mimics realistic data with: + - Variables with only time dimension + - Variables with time + one other dimension + - Variables with time + multiple dimensions + """ + np.random.seed(seed) + timesteps = pd.date_range('2020-01-01', periods=n_timesteps, freq='h') + + ds = xr.Dataset( + coords={ + 'time': timesteps, + 'component': ['comp1', 'comp2'], + 'bus': ['bus1', 'bus2'], + 'scenario': ['base', 'alt'], + } + ) + + # Variable with only time dimension + ds['total_demand'] = xr.DataArray( + np.random.randn(n_timesteps), + dims=['time'], + coords={'time': ds.time}, + ) + + # Variable with time + component + ds['component_flow'] = xr.DataArray( + np.random.randn(n_timesteps, 2), + dims=['time', 'component'], + coords={'time': ds.time, 'component': ds.component}, + ) + + # Variable with time + bus + ds['bus_balance'] = xr.DataArray( + np.random.randn(n_timesteps, 2), + dims=['time', 'bus'], + coords={'time': ds.time, 'bus': ds.bus}, + ) + + # Variable with time + component + bus + ds['flow_on_bus'] = xr.DataArray( + np.random.randn(n_timesteps, 2, 2), + dims=['time', 'component', 'bus'], + coords={'time': ds.time, 'component': ds.component, 'bus': ds.bus}, + ) + + # Variable with time + scenario + ds['scenario_demand'] = xr.DataArray( + np.random.randn(n_timesteps, 2), + dims=['time', 'scenario'], + coords={'time': ds.time, 'scenario': ds.scenario}, + ) + + # Variable with time + component + scenario + ds['component_scenario_flow'] = xr.DataArray( + np.random.randn(n_timesteps, 2, 2), + dims=['time', 'component', 'scenario'], + coords={'time': ds.time, 'component': ds.component, 'scenario': ds.scenario}, + ) + + return ds + + +@pytest.mark.parametrize('method', ['mean', 'sum', 'max', 'min', 'first', 'last']) +@pytest.mark.parametrize('freq', ['2h', 
'4h', '1D']) +def test_resample_equivalence_mixed_dimensions(method, freq): + """ + Test that _resample_by_dimension_groups produces same results as naive resampling. + + Uses a dataset with variables having different dimension structures. + """ + ds = create_dataset_with_mixed_dimensions(n_timesteps=100) + + # Method 1: Optimized approach (with dimension grouping) + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, freq, method) + + # Method 2: Naive approach (direct Dataset resampling) + result_naive = naive_dataset_resample(ds, freq, method) + + # Compare results + xr.testing.assert_allclose(result_optimized, result_naive) + + +@pytest.mark.parametrize('method', ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median']) +def test_resample_equivalence_single_dimension(method): + """ + Test with variables having only time dimension. + """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + + ds = xr.Dataset(coords={'time': timesteps}) + ds['var1'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time}) + ds['var2'] = xr.DataArray(np.random.randn(48) * 10, dims=['time'], coords={'time': ds.time}) + ds['var3'] = xr.DataArray(np.random.randn(48) / 5, dims=['time'], coords={'time': ds.time}) + + # Optimized approach + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method) + + # Naive approach + result_naive = naive_dataset_resample(ds, '2h', method) + + # Compare results + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_empty_dataset(): + """ + Test with an empty dataset (edge case). 
+ """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + ds = xr.Dataset(coords={'time': timesteps}) + + # Both should handle empty dataset gracefully + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', 'mean') + result_naive = naive_dataset_resample(ds, '2h', 'mean') + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_single_variable(): + """ + Test with a single variable. + """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + ds = xr.Dataset(coords={'time': timesteps}) + ds['single_var'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time}) + + # Test multiple methods + for method in ['mean', 'sum', 'max', 'min']: + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '3h', method) + result_naive = naive_dataset_resample(ds, '3h', method) + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_with_nans(): + """ + Test with NaN values to ensure they're handled consistently. + """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + + ds = xr.Dataset(coords={'time': timesteps, 'component': ['a', 'b']}) + + # Create variable with some NaN values + data = np.random.randn(48, 2) + data[5:10, 0] = np.nan + data[20:25, 1] = np.nan + + ds['var_with_nans'] = xr.DataArray( + data, dims=['time', 'component'], coords={'time': ds.time, 'component': ds.component} + ) + + # Test with methods that handle NaNs + for method in ['mean', 'sum', 'max', 'min', 'first', 'last']: + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method) + result_naive = naive_dataset_resample(ds, '2h', method) + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_different_dimension_orders(): + """ + Test that dimension order doesn't affect the equivalence. 
+ """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + + ds = xr.Dataset( + coords={ + 'time': timesteps, + 'x': ['x1', 'x2'], + 'y': ['y1', 'y2'], + } + ) + + # Variable with time first + ds['var_time_first'] = xr.DataArray( + np.random.randn(48, 2, 2), + dims=['time', 'x', 'y'], + coords={'time': ds.time, 'x': ds.x, 'y': ds.y}, + ) + + # Variable with time in middle + ds['var_time_middle'] = xr.DataArray( + np.random.randn(2, 48, 2), + dims=['x', 'time', 'y'], + coords={'x': ds.x, 'time': ds.time, 'y': ds.y}, + ) + + # Variable with time last + ds['var_time_last'] = xr.DataArray( + np.random.randn(2, 2, 48), + dims=['x', 'y', 'time'], + coords={'x': ds.x, 'y': ds.y, 'time': ds.time}, + ) + + for method in ['mean', 'sum', 'max', 'min']: + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method) + result_naive = naive_dataset_resample(ds, '2h', method) + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_multiple_variables_same_dims(): + """ + Test with multiple variables sharing the same dimensions. + + This is the key optimization case - variables with same dims should be + grouped and resampled together. + """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + + ds = xr.Dataset(coords={'time': timesteps, 'location': ['A', 'B', 'C']}) + + # Multiple variables with same dimensions (time, location) + for i in range(3): + ds[f'var_{i}'] = xr.DataArray( + np.random.randn(48, 3), + dims=['time', 'location'], + coords={'time': ds.time, 'location': ds.location}, + ) + + for method in ['mean', 'sum', 'max', 'min']: + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method) + result_naive = naive_dataset_resample(ds, '2h', method) + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_large_dataset(): + """ + Test with a larger, more realistic dataset. 
+ """ + timesteps = pd.date_range('2020-01-01', periods=168, freq='h') # One week + + ds = xr.Dataset( + coords={ + 'time': timesteps, + 'component': [f'comp_{i}' for i in range(5)], + 'bus': [f'bus_{i}' for i in range(3)], + } + ) + + # Various variable types + ds['simple_var'] = xr.DataArray(np.random.randn(168), dims=['time'], coords={'time': ds.time}) + ds['component_var'] = xr.DataArray( + np.random.randn(168, 5), dims=['time', 'component'], coords={'time': ds.time, 'component': ds.component} + ) + ds['bus_var'] = xr.DataArray(np.random.randn(168, 3), dims=['time', 'bus'], coords={'time': ds.time, 'bus': ds.bus}) + ds['complex_var'] = xr.DataArray( + np.random.randn(168, 5, 3), + dims=['time', 'component', 'bus'], + coords={'time': ds.time, 'component': ds.component, 'bus': ds.bus}, + ) + + # Test with a subset of methods (to keep test time reasonable) + for method in ['mean', 'sum', 'first']: + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '1D', method) + result_naive = naive_dataset_resample(ds, '1D', method) + + xr.testing.assert_allclose(result_optimized, result_naive) + + +def test_resample_equivalence_with_kwargs(): + """ + Test that kwargs are properly forwarded to resample(). + + Verifies that additional arguments like label and closed are correctly + passed through the optimization path. 
+ """ + timesteps = pd.date_range('2020-01-01', periods=48, freq='h') + ds = xr.Dataset(coords={'time': timesteps}) + ds['var'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time}) + + kwargs = {'label': 'right', 'closed': 'right'} + result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', 'mean', **kwargs) + result_naive = ds.resample(time='2h', **kwargs).mean() + + xr.testing.assert_allclose(result_optimized, result_naive) + + +if __name__ == '__main__': + pytest.main(['-v', __file__]) From 968ff89aae3e5929b72afb58d25df1bdd323de7d Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Thu, 6 Nov 2025 19:26:46 +0100 Subject: [PATCH 05/35] Update CHANGELOG.md --- CHANGELOG.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 13cb24f38..f004372b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,13 +56,10 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/). ### ✨ Added -- Added options to resample and select subsets of the flowsystems without converting to and from Dataset each time. Use the new methods `FlowSystem.__dataset_resample()`, `FlowSystem.__dataset_sel()` and `FlowSystem.__dataset_isel()`. All of them expect and return a dataset. 
 ### 💥 Breaking Changes
 
 ### ♻️ Changed
-- Truncate repr of FlowSystem and CalculationResults to only show the first 10 items of each category
-- Greatly sped up the resampling of a FlowSystem again
 
 ### 🗑️ Deprecated
 
@@ -84,6 +81,23 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp
 
 Until here -->
 
+## [3.5.0] - 2025-11-06
+
+**Summary**: Improve representations and improve resampling
+
+If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/).
+
+### ✨ Added
+- Added options to resample and select subsets of flowsystems without converting to and from Dataset each time. Use the new methods `FlowSystem._dataset_resample()`, `FlowSystem._dataset_sel()` and `FlowSystem._dataset_isel()`. All of them expect and return a dataset.
+
+### 💥 Breaking Changes
+
+### ♻️ Changed
+- Truncate repr of FlowSystem and CalculationResults to only show the first 10 items of each category
+- Greatly sped up the resampling of a FlowSystem again
+
+---
+
 ## [3.4.1] - 2025-11-04
 
 **Summary**: Speed up resampling by 20-40 times.

From 5f96f6f2687e77080913fed1852707e8dc1397cb Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Thu, 6 Nov 2025 20:17:43 +0100
Subject: [PATCH 06/35] Fix bug regarding attrs

---
 flixopt/flow_system.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index eeb35e6f2..1fc280226 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -1265,6 +1265,7 @@ def _dataset_resample(
     ) -> xr.Dataset:
         """
         Resample dataset along time dimension (for power users to avoid conversion overhead).
+        Preserves only the attrs of the Dataset.
 
         Uses optimized _resample_by_dimension_groups() to avoid broadcasting issues.
         See _dataset_sel() for usage pattern. 
@@ -1286,6 +1287,9 @@ def _dataset_resample( if method not in available_methods: raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}') + # Preserve original dataset attributes (especially the reference structure) + original_attrs = dict(dataset.attrs) + # Separate time and non-time variables time_var_names = [v for v in dataset.data_vars if 'time' in dataset[v].dims] non_time_var_names = [v for v in dataset.data_vars if v not in time_var_names] @@ -1303,6 +1307,9 @@ def _dataset_resample( else: result = resampled_time_dataset + # Restore original attributes (xr.merge can drop them) + result.attrs.update(original_attrs) + # Update time-related attributes based on new time index return cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps) From 3a4d7736d2bf102302fffb47052410e775dc5a2c Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Thu, 6 Nov 2025 22:10:40 +0100 Subject: [PATCH 07/35] Add test for resampling --- tests/test_flow_system_resample.py | 293 +++++++++++++++++++++++++++++ 1 file changed, 293 insertions(+) create mode 100644 tests/test_flow_system_resample.py diff --git a/tests/test_flow_system_resample.py b/tests/test_flow_system_resample.py new file mode 100644 index 000000000..d28872a0f --- /dev/null +++ b/tests/test_flow_system_resample.py @@ -0,0 +1,293 @@ +"""Integration tests for FlowSystem.resample() - verifies correct data resampling and structure preservation.""" + +import numpy as np +import pandas as pd +import pytest +from numpy.testing import assert_allclose + +import flixopt as fx + + +@pytest.fixture +def simple_fs(): + """Simple FlowSystem with basic components.""" + timesteps = pd.date_range('2023-01-01', periods=24, freq='h') + fs = fx.FlowSystem(timesteps) + fs.add_elements( + fx.Bus('heat'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True) + ) + fs.add_elements( + fx.Sink( + 
label='demand', + inputs=[fx.Flow(label='in', bus='heat', fixed_relative_profile=np.linspace(10, 20, 24), size=1)], + ), + fx.Source( + label='source', outputs=[fx.Flow(label='out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})] + ), + ) + return fs + + +@pytest.fixture +def complex_fs(): + """FlowSystem with complex elements (storage, piecewise, invest).""" + timesteps = pd.date_range('2023-01-01', periods=48, freq='h') + fs = fx.FlowSystem(timesteps) + + fs.add_elements( + fx.Bus('heat'), + fx.Bus('elec'), + fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True), + ) + + # Storage + fs.add_elements( + fx.Storage( + label='battery', + charging=fx.Flow('charge', bus='elec', size=10), + discharging=fx.Flow('discharge', bus='elec', size=10), + capacity_in_flow_hours=fx.InvestParameters(fixed_size=100), + ) + ) + + # Piecewise converter + converter = fx.linear_converters.Boiler( + 'boiler', eta=0.9, Q_fu=fx.Flow('gas', bus='elec'), Q_th=fx.Flow('heat', bus='heat') + ) + converter.Q_th.size = 100 + fs.add_elements(converter) + + # Component with investment + fs.add_elements( + fx.Source( + label='pv', + outputs=[ + fx.Flow( + 'gen', + bus='elec', + size=fx.InvestParameters(maximum_size=1000, effects_of_investment_per_size={'costs': 100}), + ) + ], + ) + ) + + return fs + + +# === Basic Functionality === + + +@pytest.mark.parametrize('freq,method', [('2h', 'mean'), ('4h', 'sum'), ('6h', 'first')]) +def test_basic_resample(simple_fs, freq, method): + """Test basic resampling preserves structure.""" + fs_r = simple_fs.resample(freq, method=method) + assert len(fs_r.components) == len(simple_fs.components) + assert len(fs_r.buses) == len(simple_fs.buses) + assert len(fs_r.timesteps) < len(simple_fs.timesteps) + + +@pytest.mark.parametrize( + 'method,expected', + [ + ('mean', [15.0, 35.0]), + ('sum', [30.0, 70.0]), + ('first', [10.0, 30.0]), + ('last', [20.0, 40.0]), + ], +) +def test_resample_methods(method, expected): + 
"""Test different resampling methods.""" + ts = pd.date_range('2023-01-01', periods=4, freq='h') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink( + label='s', + inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)], + ) + ) + + fs_r = fs.resample('2h', method=method) + assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, expected, rtol=1e-10) + + +def test_structure_preserved(simple_fs): + """Test all structural elements preserved.""" + fs_r = simple_fs.resample('2h', method='mean') + assert set(simple_fs.components.keys()) == set(fs_r.components.keys()) + assert set(simple_fs.buses.keys()) == set(fs_r.buses.keys()) + assert set(simple_fs.effects.keys()) == set(fs_r.effects.keys()) + + # Flow connections preserved + for label in simple_fs.flows.keys(): + assert simple_fs.flows[label].bus == fs_r.flows[label].bus + assert simple_fs.flows[label].component == fs_r.flows[label].component + + +def test_time_metadata_updated(simple_fs): + """Test time metadata correctly updated.""" + fs_r = simple_fs.resample('3h', method='mean') + assert len(fs_r.timesteps) == 8 + assert_allclose(fs_r.hours_per_timestep.values, 3.0) + assert fs_r.hours_of_last_timestep == 3.0 + + +# === Advanced Dimensions === + + +@pytest.mark.parametrize( + 'dim_name,dim_value', + [ + ('periods', pd.Index([2023, 2024], name='period')), + ('scenarios', pd.Index(['base', 'high'], name='scenario')), + ], +) +def test_with_dimensions(simple_fs, dim_name, dim_value): + """Test resampling preserves period/scenario dimensions.""" + fs = fx.FlowSystem(simple_fs.timesteps, **{dim_name: dim_value}) + fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink(label='d', inputs=[fx.Flow(label='in', bus='h', 
fixed_relative_profile=np.ones(24), size=1)]) + ) + + fs_r = fs.resample('2h', method='mean') + assert getattr(fs_r, dim_name) is not None + pd.testing.assert_index_equal(getattr(fs_r, dim_name), dim_value) + + +# === Complex Elements === + + +def test_storage_resample(complex_fs): + """Test storage component resampling.""" + fs_r = complex_fs.resample('4h', method='mean') + assert 'battery' in fs_r.components + storage = fs_r.components['battery'] + assert storage.charging.label == 'charge' + assert storage.discharging.label == 'discharge' + + +def test_converter_resample(complex_fs): + """Test converter component resampling.""" + fs_r = complex_fs.resample('4h', method='mean') + assert 'boiler' in fs_r.components + boiler = fs_r.components['boiler'] + assert hasattr(boiler, 'eta') + + +def test_invest_resample(complex_fs): + """Test investment parameters preserved.""" + fs_r = complex_fs.resample('4h', method='mean') + pv_flow = fs_r.flows['pv(gen)'] + assert isinstance(pv_flow.size, fx.InvestParameters) + assert pv_flow.size.maximum_size == 1000 + + +# === Modeling Integration === + + +@pytest.mark.parametrize('with_dim', [None, 'periods', 'scenarios']) +def test_modeling(with_dim): + """Test resampled FlowSystem can be modeled.""" + ts = pd.date_range('2023-01-01', periods=48, freq='h') + kwargs = {} + if with_dim == 'periods': + kwargs['periods'] = pd.Index([2023, 2024], name='period') + elif with_dim == 'scenarios': + kwargs['scenarios'] = pd.Index(['base', 'high'], name='scenario') + + fs = fx.FlowSystem(ts, **kwargs) + fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink( + label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)] + ), + fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]), + ) + + fs_r = fs.resample('4h', method='mean') + calc = 
fx.FullCalculation('test', fs_r) + calc.do_modeling() + + assert calc.model is not None + assert len(calc.model.variables) > 0 + + +def test_model_structure_preserved(): + """Test model structure (var/constraint types) preserved.""" + ts = pd.date_range('2023-01-01', periods=48, freq='h') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink( + label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)] + ), + fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]), + ) + + calc_orig = fx.FullCalculation('orig', fs) + calc_orig.do_modeling() + + fs_r = fs.resample('4h', method='mean') + calc_r = fx.FullCalculation('resamp', fs_r) + calc_r.do_modeling() + + # Same number of variable/constraint types + assert len(calc_orig.model.variables) == len(calc_r.model.variables) + assert len(calc_orig.model.constraints) == len(calc_r.model.constraints) + + # Same names + assert set(calc_orig.model.variables.labels.data_vars.keys()) == set(calc_r.model.variables.labels.data_vars.keys()) + assert set(calc_orig.model.constraints.labels.data_vars.keys()) == set( + calc_r.model.constraints.labels.data_vars.keys() + ) + + +# === Advanced Features === + + +def test_dataset_roundtrip(simple_fs): + """Test dataset serialization.""" + fs_r = simple_fs.resample('2h', method='mean') + assert fx.FlowSystem.from_dataset(fs_r.to_dataset()) == fs_r + + +def test_dataset_chaining(simple_fs): + """Test power user pattern.""" + ds = simple_fs.to_dataset() + ds = fx.FlowSystem._dataset_sel(ds, time='2023-01-01') + ds = fx.FlowSystem._dataset_resample(ds, freq='2h', method='mean') + fs_result = fx.FlowSystem.from_dataset(ds) + + fs_simple = simple_fs.sel(time='2023-01-01').resample('2h', method='mean') + assert fs_result == fs_simple + + +@pytest.mark.parametrize('freq,exp_len', 
[('2h', 84), ('6h', 28), ('1D', 7)]) +def test_frequencies(freq, exp_len): + """Test various frequencies.""" + ts = pd.date_range('2023-01-01', periods=168, freq='h') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(168), size=1)]) + ) + + assert len(fs.resample(freq, method='mean').timesteps) == exp_len + + +def test_irregular_timesteps(): + """Test irregular timesteps.""" + ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') + fs = fx.FlowSystem(ts) + fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) + fs.add_elements( + fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(3), size=1)]) + ) + + fs_r = fs.resample('1h', method='mean') + assert len(fs_r.timesteps) > 0 + + +if __name__ == '__main__': + pytest.main(['-v', __file__]) From 3f2542fd821fd38e3ea6fcbe827e0e763397e21b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 14 Nov 2025 16:53:51 +0100 Subject: [PATCH 08/35] chore(deps): update dependency mkdocs-material to v9.6.23 (#462) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 764dbea1d..eb1fea0f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,7 +98,7 @@ dev = [ # Documentation building docs = [ "mkdocs==1.6.1", - "mkdocs-material==9.6.22", + "mkdocs-material==9.6.23", "mkdocstrings-python==1.18.2", "mkdocs-table-reader-plugin==3.1.0", "mkdocs-gen-files==0.5.0", From 097f73fdee165b6ebc887910fea74d015477f930 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: 
Fri, 14 Nov 2025 17:54:08 +0100 Subject: [PATCH 09/35] Overhaul types --- flixopt/__init__.py | 3 + flixopt/components.py | 29 ++++---- flixopt/core.py | 22 +++++-- flixopt/effects.py | 21 ++++-- flixopt/elements.py | 7 +- flixopt/interface.py | 19 +++--- flixopt/types.py | 149 ++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 216 insertions(+), 34 deletions(-) create mode 100644 flixopt/types.py diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 3633d86a1..31a242fe0 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -41,6 +41,9 @@ solvers, ) +# Type system for dimension-aware type hints +from .types import Data, Period, Scalar, Scenario, Time + # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. # They are suppressed at runtime to provide a cleaner user experience. diff --git a/flixopt/components.py b/flixopt/components.py index e4209c8ac..818f349f5 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -23,6 +23,7 @@ import linopy from .flow_system import FlowSystem + from .types import Data, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -169,7 +170,7 @@ def __init__( inputs: list[Flow], outputs: list[Flow], on_off_parameters: OnOffParameters | None = None, - conversion_factors: list[dict[str, TemporalDataUser]] | None = None, + conversion_factors: list[dict[str, Data[Time, Scenario]]] | None = None, piecewise_conversion: PiecewiseConversion | None = None, meta_data: dict | None = None, ): @@ -386,17 +387,17 @@ def __init__( label: str, charging: Flow, discharging: Flow, - capacity_in_flow_hours: PeriodicDataUser | InvestParameters, - relative_minimum_charge_state: TemporalDataUser = 0, - relative_maximum_charge_state: TemporalDataUser = 1, - initial_charge_state: PeriodicDataUser | Literal['lastValueOfSim'] = 0, - minimal_final_charge_state: PeriodicDataUser | None = None, - maximal_final_charge_state: PeriodicDataUser 
| None = None, - relative_minimum_final_charge_state: PeriodicDataUser | None = None, - relative_maximum_final_charge_state: PeriodicDataUser | None = None, - eta_charge: TemporalDataUser = 1, - eta_discharge: TemporalDataUser = 1, - relative_loss_per_hour: TemporalDataUser = 0, + capacity_in_flow_hours: Data[Period, Scenario] | InvestParameters, + relative_minimum_charge_state: Data[Time, Scenario] = 0, + relative_maximum_charge_state: Data[Time, Scenario] = 1, + initial_charge_state: Data[Period, Scenario] | Literal['lastValueOfSim'] = 0, + minimal_final_charge_state: Data[Period, Scenario] | None = None, + maximal_final_charge_state: Data[Period, Scenario] | None = None, + relative_minimum_final_charge_state: Data[Period, Scenario] | None = None, + relative_maximum_final_charge_state: Data[Period, Scenario] | None = None, + eta_charge: Data[Time, Scenario] = 1, + eta_discharge: Data[Time, Scenario] = 1, + relative_loss_per_hour: Data[Time, Scenario] = 0, prevent_simultaneous_charge_and_discharge: bool = True, balanced: bool = False, meta_data: dict | None = None, @@ -663,8 +664,8 @@ def __init__( out1: Flow, in2: Flow | None = None, out2: Flow | None = None, - relative_losses: TemporalDataUser | None = None, - absolute_losses: TemporalDataUser | None = None, + relative_losses: Data[Time, Scenario] | None = None, + absolute_losses: Data[Time, Scenario] | None = None, on_off_parameters: OnOffParameters = None, prevent_simultaneous_flows_in_both_directions: bool = True, balanced: bool = False, diff --git a/flixopt/core.py b/flixopt/core.py index 917ee2984..1b8e1a660 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -12,13 +12,22 @@ import pandas as pd import xarray as xr +from flixopt.types import Data, Period, Scalar, Scenario, Time + logger = logging.getLogger('flixopt') -Scalar = int | float +# Legacy type aliases (kept for backward compatibility) +# These are being replaced by dimension-aware Data[...] 
types +Scalar = Scalar """A single number, either integer or float.""" -PeriodicDataUser = int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray -"""User data which has no time dimension. Internally converted to a Scalar or an xr.DataArray without a time dimension.""" +PeriodicDataUser = Data[Period, Scenario] +""" +User data which has no time dimension. Internally converted to a Scalar or an xr.DataArray without a time dimension. + +.. deprecated:: + Use dimension-aware types instead: `Data[Period, Scenario]` or `Data[Scenario]` +""" PeriodicData = xr.DataArray """Internally used datatypes for periodic data.""" @@ -153,7 +162,12 @@ def agg_weight(self): TemporalDataUser = ( int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray | TimeSeriesData ) -"""User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension.""" +""" +User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension. + +.. 
deprecated:: + Use dimension-aware types instead: `Data[Time]`, `Data[Time, Scenario]`, or `Data[Time, Period, Scenario]` +""" TemporalData = xr.DataArray | TimeSeriesData """Internally used datatypes for temporal data (data with a time dimension).""" diff --git a/flixopt/effects.py b/flixopt/effects.py index ddf8eadeb..44bc6d25c 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -20,6 +20,7 @@ from .core import PeriodicDataUser, Scalar, TemporalData, TemporalDataUser from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io +from .types import Data, Period, Scenario, Time if TYPE_CHECKING: from collections.abc import Iterator @@ -436,11 +437,23 @@ def _do_modeling(self): ) -TemporalEffectsUser = TemporalDataUser | dict[str, TemporalDataUser] # User-specified Shares to Effects -""" This datatype is used to define a temporal share to an effect by a certain attribute. """ +TemporalEffectsUser = Data[Time, Scenario] | dict[str, Data[Time, Scenario]] # User-specified Shares to Effects +""" +This datatype is used to define a temporal share to an effect by a certain attribute. + +Can be: +- A single value (scalar, array, Series, DataFrame, DataArray) with at most [Time, Scenario] dimensions +- A dictionary mapping effect names to values with at most [Time, Scenario] dimensions +""" -PeriodicEffectsUser = PeriodicDataUser | dict[str, PeriodicDataUser] # User-specified Shares to Effects -""" This datatype is used to define a scalar share to an effect by a certain attribute. """ +PeriodicEffectsUser = Data[Period, Scenario] | dict[str, Data[Period, Scenario]] # User-specified Shares to Effects +""" +This datatype is used to define a periodic share to an effect by a certain attribute. 
+ +Can be: +- A single value (scalar, array, Series, DataFrame, DataArray) with at most [Period, Scenario] dimensions +- A dictionary mapping effect names to values with at most [Period, Scenario] dimensions +""" TemporalEffects = dict[str, TemporalData] # User-specified Shares to Effects """ This datatype is used internally to handle temporal shares to an effect. """ diff --git a/flixopt/elements.py b/flixopt/elements.py index 337f34fce..46ac8f6e8 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -24,6 +24,7 @@ from .effects import TemporalEffectsUser from .flow_system import FlowSystem + from .types import Data, Scenario, Time logger = logging.getLogger('flixopt') @@ -420,9 +421,9 @@ def __init__( label: str, bus: str, size: Scalar | InvestParameters = None, - fixed_relative_profile: TemporalDataUser | None = None, - relative_minimum: TemporalDataUser = 0, - relative_maximum: TemporalDataUser = 1, + fixed_relative_profile: Data[Time, Scenario] | None = None, + relative_minimum: Data[Time, Scenario] = 0, + relative_maximum: Data[Time, Scenario] = 1, effects_per_flow_hour: TemporalEffectsUser | None = None, on_off_parameters: OnOffParameters | None = None, flow_hours_total_max: Scalar | None = None, diff --git a/flixopt/interface.py b/flixopt/interface.py index 21cbc82b9..72d7342a3 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -22,6 +22,7 @@ from .core import PeriodicData, PeriodicDataUser, Scalar, TemporalDataUser from .effects import PeriodicEffectsUser, TemporalEffectsUser from .flow_system import FlowSystem + from .types import Data, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -73,7 +74,7 @@ class Piece(Interface): """ - def __init__(self, start: TemporalDataUser, end: TemporalDataUser): + def __init__(self, start: Data[Time, Period, Scenario], end: Data[Time, Period, Scenario]): self.start = start self.end = end self.has_time_dim = False @@ -874,15 +875,15 @@ class InvestParameters(Interface): def __init__( 
self, - fixed_size: PeriodicDataUser | None = None, - minimum_size: PeriodicDataUser | None = None, - maximum_size: PeriodicDataUser | None = None, + fixed_size: Data[Period, Scenario] | None = None, + minimum_size: Data[Period, Scenario] | None = None, + maximum_size: Data[Period, Scenario] | None = None, mandatory: bool = False, effects_of_investment: PeriodicEffectsUser | None = None, effects_of_investment_per_size: PeriodicEffectsUser | None = None, effects_of_retirement: PeriodicEffectsUser | None = None, piecewise_effects_of_investment: PiecewiseEffects | None = None, - linked_periods: PeriodicDataUser | tuple[int, int] | None = None, + linked_periods: Data[Period, Scenario] | tuple[int, int] | None = None, **kwargs, ): # Handle deprecated parameters using centralized helper @@ -1272,10 +1273,10 @@ def __init__( effects_per_running_hour: TemporalEffectsUser | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, - consecutive_on_hours_min: TemporalDataUser | None = None, - consecutive_on_hours_max: TemporalDataUser | None = None, - consecutive_off_hours_min: TemporalDataUser | None = None, - consecutive_off_hours_max: TemporalDataUser | None = None, + consecutive_on_hours_min: Data[Time, Scenario] | None = None, + consecutive_on_hours_max: Data[Time, Scenario] | None = None, + consecutive_off_hours_min: Data[Time, Scenario] | None = None, + consecutive_off_hours_max: Data[Time, Scenario] | None = None, switch_on_total_max: int | None = None, force_switch_on: bool = False, ): diff --git a/flixopt/types.py b/flixopt/types.py new file mode 100644 index 000000000..345d4a48c --- /dev/null +++ b/flixopt/types.py @@ -0,0 +1,149 @@ +""" +Type system for dimension-aware data in flixopt. + +This module provides generic types that clearly communicate which dimensions +data can have. The type system is designed to be self-documenting while +maintaining maximum flexibility for input formats. 
+ +Key Concepts +------------ +- Dimension markers (`Time`, `Period`, `Scenario`) represent the possible dimensions +- `Data[...]` generic type indicates the **maximum** dimensions data can have +- Data can have any subset of the specified dimensions (including being scalar) +- All standard input formats are supported (scalar, array, Series, DataFrame, DataArray) + +Examples +-------- +Type hint `Data[Time]` accepts: + - Scalar: `0.5` (broadcast to all timesteps) + - 1D array: `np.array([1, 2, 3])` (matched to time dimension) + - pandas Series: with DatetimeIndex matching flow system + - xarray DataArray: with 'time' dimension + +Type hint `Data[Time, Scenario]` accepts: + - Scalar: `100` (broadcast to all time and scenario combinations) + - 1D array: matched to time OR scenario dimension + - 2D array: matched to both dimensions + - pandas DataFrame: columns as scenarios, index as time + - xarray DataArray: with any subset of 'time', 'scenario' dimensions + +Type hint `Data[Period, Scenario]` (periodic data, no time): + - Used for investment parameters that vary by planning period + - Accepts scalars, arrays matching periods/scenarios, or DataArrays + +Type hint `Scalar`: + - Only numeric scalars (int, float) + - Not converted to DataArray, stays as scalar +""" + +from typing import Any, TypeAlias + +import numpy as np +import pandas as pd +import xarray as xr + + +# Dimension marker classes for generic type subscripting +class Time: + """Marker for the time dimension in Data generic types.""" + + pass + + +class Period: + """Marker for the period dimension in Data generic types (for multi-period optimization).""" + + pass + + +class Scenario: + """Marker for the scenario dimension in Data generic types (for scenario analysis).""" + + pass + + +class _DataMeta(type): + """Metaclass for Data to enable subscript notation Data[Time, Scenario].""" + + def __getitem__(cls, dimensions): + """ + Create a type hint showing maximum dimensions. 
+ + The dimensions parameter can be: + - A single dimension: Data[Time] + - Multiple dimensions: Data[Time, Scenario] + + The type hint communicates that data can have **at most** these dimensions. + Actual data can be: + - Scalar (broadcast to all dimensions) + - Have any subset of the specified dimensions + - Have all specified dimensions + + This is consistent with xarray's broadcasting semantics and the + framework's data conversion behavior. + """ + # For type checking purposes, we return the same union type regardless + # of which dimensions are specified. The dimension parameters serve + # as documentation rather than runtime validation. + + # Return type that includes all possible input formats + return int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray + + +class Data(metaclass=_DataMeta): + """ + Generic type for data that can have various dimensions. + + Use subscript notation to specify the maximum dimensions: + - `Data[Time]`: Time-varying data (at most 'time' dimension) + - `Data[Time, Scenario]`: Time-varying with scenarios (at most 'time', 'scenario') + - `Data[Period, Scenario]`: Periodic data without time (at most 'period', 'scenario') + - `Data[Time, Period, Scenario]`: Full dimensionality (rarely used) + + Semantics: "At Most" Dimensions + -------------------------------- + When you see `Data[Time, Scenario]`, it means the data can have: + - No dimensions (scalar): broadcast to all time and scenario values + - Just 'time': broadcast across scenarios + - Just 'scenario': broadcast across time + - Both 'time' and 'scenario': full dimensionality + + Accepted Input Formats + ---------------------- + All dimension combinations accept these formats: + - Scalars: int, float (including numpy types) + - Arrays: numpy ndarray (matched by length/shape to dimensions) + - pandas Series: matched by index to dimension coordinates + - pandas DataFrame: typically columns=scenarios, index=time + - xarray DataArray: used 
directly with dimension validation + + Conversion Behavior + ------------------- + Input data is converted to xarray.DataArray internally: + - Scalars are broadcast to all specified dimensions + - Arrays are matched by length (unambiguous) or shape (multi-dimensional) + - Series are matched by index equality with coordinate values + - DataArrays are validated and broadcast as needed + + See Also + -------- + DataConverter.to_dataarray : The conversion implementation + FlowSystem.fit_to_model_coords : Fits data to the model's coordinate system + """ + + # This class is not meant to be instantiated, only used for type hints + def __init__(self): + raise TypeError('Data is a type hint only and cannot be instantiated') + + +# Simple scalar type for dimension-less numeric values +Scalar: TypeAlias = int | float | np.integer | np.floating + +# Export public API +__all__ = [ + 'Data', + 'Time', + 'Period', + 'Scenario', + 'Scalar', +] From 01d4c2dec432afdf40c6d281a1a6ed4f2e513eb9 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 18:07:46 +0100 Subject: [PATCH 10/35] Introduce Bool data --- flixopt/__init__.py | 2 +- flixopt/types.py | 92 ++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 88 insertions(+), 6 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 31a242fe0..5583e9aaf 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -42,7 +42,7 @@ ) # Type system for dimension-aware type hints -from .types import Data, Period, Scalar, Scenario, Time +from .types import BoolData, Data, Period, Scalar, Scenario, Time # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. 
diff --git a/flixopt/types.py b/flixopt/types.py index 345d4a48c..77a1e596e 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -62,12 +62,12 @@ class Scenario: pass -class _DataMeta(type): - """Metaclass for Data to enable subscript notation Data[Time, Scenario].""" +class _NumericDataMeta(type): + """Metaclass for Data to enable subscript notation Data[Time, Scenario] for numeric data.""" def __getitem__(cls, dimensions): """ - Create a type hint showing maximum dimensions. + Create a type hint showing maximum dimensions for numeric data. The dimensions parameter can be: - A single dimension: Data[Time] @@ -86,11 +86,24 @@ def __getitem__(cls, dimensions): # of which dimensions are specified. The dimension parameters serve # as documentation rather than runtime validation. - # Return type that includes all possible input formats + # Return type that includes all possible numeric input formats return int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray -class Data(metaclass=_DataMeta): +class _BoolDataMeta(type): + """Metaclass for BoolData to enable subscript notation BoolData[Time, Scenario] for boolean data.""" + + def __getitem__(cls, dimensions): + """ + Create a type hint showing maximum dimensions for boolean data. + + Same semantics as numeric Data, but for boolean values. + """ + # Return type that includes all possible boolean input formats + return bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray + + +class Data(metaclass=_NumericDataMeta): """ Generic type for data that can have various dimensions. @@ -125,8 +138,13 @@ class Data(metaclass=_DataMeta): - Series are matched by index equality with coordinate values - DataArrays are validated and broadcast as needed + Note + ---- + This type is for **numeric** data only. For boolean data, use `BoolData`. 
+ See Also -------- + BoolData : For boolean data with dimensions DataConverter.to_dataarray : The conversion implementation FlowSystem.fit_to_model_coords : Fits data to the model's coordinate system """ @@ -136,12 +154,76 @@ def __init__(self): raise TypeError('Data is a type hint only and cannot be instantiated') +class BoolData(metaclass=_BoolDataMeta): + """ + Generic type for boolean data that can have various dimensions. + + Use subscript notation to specify the maximum dimensions: + - `BoolData[Time]`: Time-varying boolean data + - `BoolData[Time, Scenario]`: Boolean data with time and scenario dimensions + - `BoolData[Period, Scenario]`: Periodic boolean data + + Semantics: "At Most" Dimensions + -------------------------------- + Same semantics as Data, but for boolean values. + When you see `BoolData[Time, Scenario]`, the data can have: + - No dimensions (scalar bool): broadcast to all time and scenario values + - Just 'time': broadcast across scenarios + - Just 'scenario': broadcast across time + - Both 'time' and 'scenario': full dimensionality + + Accepted Input Formats (Boolean) + --------------------------------- + All dimension combinations accept these formats: + - Scalars: bool, np.bool_ + - Arrays: numpy ndarray with boolean dtype (matched by length/shape to dimensions) + - pandas Series: with boolean values, matched by index to dimension coordinates + - pandas DataFrame: with boolean values + - xarray DataArray: with boolean values, used directly with dimension validation + + Use Cases + --------- + Boolean data is typically used for: + - Binary decision variables (on/off states) + - Constraint activation flags + - Feasibility indicators + - Conditional parameters + + Examples + -------- + >>> # Scalar boolean (broadcast to all dimensions) + >>> active: BoolData[Time] = True + >>> + >>> # Time-varying on/off pattern + >>> import numpy as np + >>> pattern: BoolData[Time] = np.array([True, False, True, False]) + >>> + >>> # Scenario-specific 
activation + >>> import pandas as pd + >>> scenario_active: BoolData[Scenario] = pd.Series([True, False, True], index=['low', 'mid', 'high']) + + Note + ---- + This type is for **boolean** data only. For numeric data, use `Data`. + + See Also + -------- + Data : For numeric data with dimensions + DataConverter.to_dataarray : The conversion implementation + """ + + # This class is not meant to be instantiated, only used for type hints + def __init__(self): + raise TypeError('BoolData is a type hint only and cannot be instantiated') + + # Simple scalar type for dimension-less numeric values Scalar: TypeAlias = int | float | np.integer | np.floating # Export public API __all__ = [ 'Data', + 'BoolData', 'Time', 'Period', 'Scenario', From a5b37a7e56d6f71ea086c38188958163c318da91 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 18:14:03 +0100 Subject: [PATCH 11/35] Introduce Bool data --- flixopt/__init__.py | 2 +- flixopt/types.py | 30 ++++++++++++++++++++---------- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 5583e9aaf..f744d05a3 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -42,7 +42,7 @@ ) # Type system for dimension-aware type hints -from .types import BoolData, Data, Period, Scalar, Scenario, Time +from .types import BoolData, Data, NumericData, Period, Scalar, Scenario, Time # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. diff --git a/flixopt/types.py b/flixopt/types.py index 77a1e596e..17aadddce 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -105,17 +105,19 @@ def __getitem__(cls, dimensions): class Data(metaclass=_NumericDataMeta): """ - Generic type for data that can have various dimensions. + Base type for numeric data that can have various dimensions. + + This is the internal base class. 
Use `NumericData` publicly for clarity. Use subscript notation to specify the maximum dimensions: - - `Data[Time]`: Time-varying data (at most 'time' dimension) - - `Data[Time, Scenario]`: Time-varying with scenarios (at most 'time', 'scenario') - - `Data[Period, Scenario]`: Periodic data without time (at most 'period', 'scenario') - - `Data[Time, Period, Scenario]`: Full dimensionality (rarely used) + - `NumericData[Time]`: Time-varying numeric data (at most 'time' dimension) + - `NumericData[Time, Scenario]`: Time-varying with scenarios (at most 'time', 'scenario') + - `NumericData[Period, Scenario]`: Periodic data without time (at most 'period', 'scenario') + - `NumericData[Time, Period, Scenario]`: Full dimensionality (rarely used) Semantics: "At Most" Dimensions -------------------------------- - When you see `Data[Time, Scenario]`, it means the data can have: + When you see `NumericData[Time, Scenario]`, it means the data can have: - No dimensions (scalar): broadcast to all time and scenario values - Just 'time': broadcast across scenarios - Just 'scenario': broadcast across time @@ -142,8 +144,11 @@ class Data(metaclass=_NumericDataMeta): ---- This type is for **numeric** data only. For boolean data, use `BoolData`. + This is the base class - use `NumericData` alias publicly for clarity and symmetry with `BoolData`. + See Also -------- + NumericData : Public alias for this class BoolData : For boolean data with dimensions DataConverter.to_dataarray : The conversion implementation FlowSystem.fit_to_model_coords : Fits data to the model's coordinate system @@ -204,11 +209,11 @@ class BoolData(metaclass=_BoolDataMeta): Note ---- - This type is for **boolean** data only. For numeric data, use `Data`. + This type is for **boolean** data only. For numeric data, use `NumericData`. 
See Also -------- - Data : For numeric data with dimensions + NumericData : For numeric data with dimensions DataConverter.to_dataarray : The conversion implementation """ @@ -217,13 +222,18 @@ def __init__(self): raise TypeError('BoolData is a type hint only and cannot be instantiated') +# Public alias for Data (for clarity and symmetry with BoolData) +NumericData = Data +"""Public type for numeric data with dimensions. Alias for the internal `Data` class.""" + # Simple scalar type for dimension-less numeric values Scalar: TypeAlias = int | float | np.integer | np.floating # Export public API __all__ = [ - 'Data', - 'BoolData', + 'NumericData', # Primary public type for numeric data + 'BoolData', # Primary public type for boolean data + 'Data', # Also exported (internal base class, can be used as shorthand) 'Time', 'Period', 'Scenario', From cd5b72b5048b67e86029cebced54d54f9259e987 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 18:23:03 +0100 Subject: [PATCH 12/35] Fix typehints --- flixopt/components.py | 14 +++++++------- flixopt/effects.py | 2 +- flixopt/elements.py | 8 ++++---- flixopt/interface.py | 8 ++++---- flixopt/types.py | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 818f349f5..b91f219d9 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -388,16 +388,16 @@ def __init__( charging: Flow, discharging: Flow, capacity_in_flow_hours: Data[Period, Scenario] | InvestParameters, - relative_minimum_charge_state: Data[Time, Scenario] = 0, - relative_maximum_charge_state: Data[Time, Scenario] = 1, + relative_minimum_charge_state: Data[Time, Period, Scenario] = 0, + relative_maximum_charge_state: Data[Time, Period, Scenario] = 1, initial_charge_state: Data[Period, Scenario] | Literal['lastValueOfSim'] = 0, minimal_final_charge_state: Data[Period, Scenario] | None = None, maximal_final_charge_state: Data[Period, 
Scenario] | None = None, relative_minimum_final_charge_state: Data[Period, Scenario] | None = None, relative_maximum_final_charge_state: Data[Period, Scenario] | None = None, - eta_charge: Data[Time, Scenario] = 1, - eta_discharge: Data[Time, Scenario] = 1, - relative_loss_per_hour: Data[Time, Scenario] = 0, + eta_charge: Data[Time, Period, Scenario] = 1, + eta_discharge: Data[Time, Period, Scenario] = 1, + relative_loss_per_hour: Data[Time, Period, Scenario] = 0, prevent_simultaneous_charge_and_discharge: bool = True, balanced: bool = False, meta_data: dict | None = None, @@ -664,8 +664,8 @@ def __init__( out1: Flow, in2: Flow | None = None, out2: Flow | None = None, - relative_losses: Data[Time, Scenario] | None = None, - absolute_losses: Data[Time, Scenario] | None = None, + relative_losses: Data[Time, Period, Scenario] | None = None, + absolute_losses: Data[Time, Period, Scenario] | None = None, on_off_parameters: OnOffParameters = None, prevent_simultaneous_flows_in_both_directions: bool = True, balanced: bool = False, diff --git a/flixopt/effects.py b/flixopt/effects.py index 44bc6d25c..9c388fe2c 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -437,7 +437,7 @@ def _do_modeling(self): ) -TemporalEffectsUser = Data[Time, Scenario] | dict[str, Data[Time, Scenario]] # User-specified Shares to Effects +TemporalEffectsUser = Data[Time, Period, Scenario] | dict[str, Data[Time, Scenario]] # User-specified Shares to Effects """ This datatype is used to define a temporal share to an effect by a certain attribute. 
diff --git a/flixopt/elements.py b/flixopt/elements.py index 46ac8f6e8..aa18a67ab 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -24,7 +24,7 @@ from .effects import TemporalEffectsUser from .flow_system import FlowSystem - from .types import Data, Scenario, Time + from .types import Data, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -421,9 +421,9 @@ def __init__( label: str, bus: str, size: Scalar | InvestParameters = None, - fixed_relative_profile: Data[Time, Scenario] | None = None, - relative_minimum: Data[Time, Scenario] = 0, - relative_maximum: Data[Time, Scenario] = 1, + fixed_relative_profile: Data[Time, Period, Scenario] | None = None, + relative_minimum: Data[Time, Period, Scenario] = 0, + relative_maximum: Data[Time, Period, Scenario] = 1, effects_per_flow_hour: TemporalEffectsUser | None = None, on_off_parameters: OnOffParameters | None = None, flow_hours_total_max: Scalar | None = None, diff --git a/flixopt/interface.py b/flixopt/interface.py index 72d7342a3..1ff30b933 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -1273,10 +1273,10 @@ def __init__( effects_per_running_hour: TemporalEffectsUser | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, - consecutive_on_hours_min: Data[Time, Scenario] | None = None, - consecutive_on_hours_max: Data[Time, Scenario] | None = None, - consecutive_off_hours_min: Data[Time, Scenario] | None = None, - consecutive_off_hours_max: Data[Time, Scenario] | None = None, + consecutive_on_hours_min: Data[Time, Period, Scenario] | None = None, + consecutive_on_hours_max: Data[Time, Period, Scenario] | None = None, + consecutive_off_hours_min: Data[Time, Period, Scenario] | None = None, + consecutive_off_hours_max: Data[Time, Period, Scenario] | None = None, switch_on_total_max: int | None = None, force_switch_on: bool = False, ): diff --git a/flixopt/types.py b/flixopt/types.py index 17aadddce..58bd61cd9 100644 --- a/flixopt/types.py +++ 
b/flixopt/types.py @@ -71,7 +71,7 @@ def __getitem__(cls, dimensions): The dimensions parameter can be: - A single dimension: Data[Time] - - Multiple dimensions: Data[Time, Scenario] + - Multiple dimensions: Data[Time, Period, Scenario] The type hint communicates that data can have **at most** these dimensions. Actual data can be: From f188f042755f47fbb82359515f2945f840783525 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 18:59:09 +0100 Subject: [PATCH 13/35] Add EffectData type --- flixopt/__init__.py | 5 +- flixopt/components.py | 30 +++++------ flixopt/core.py | 10 ++-- flixopt/effects.py | 88 ++++++++++++++++++++++++------- flixopt/elements.py | 20 ++++---- flixopt/features.py | 5 +- flixopt/interface.py | 26 +++++----- flixopt/types.py | 117 +++++++++++++++++++++++++++++++++++++----- 8 files changed, 224 insertions(+), 77 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index f744d05a3..47838745a 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -41,8 +41,11 @@ solvers, ) +# Effect-specific types +from .effects import PeriodicEffectsUser, TemporalEffectsUser + # Type system for dimension-aware type hints -from .types import BoolData, Data, NumericData, Period, Scalar, Scenario, Time +from .types import BoolData, Data, EffectData, NumericData, Period, Scalar, Scenario, Time # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. 
diff --git a/flixopt/components.py b/flixopt/components.py index b91f219d9..2c0239559 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -23,7 +23,7 @@ import linopy from .flow_system import FlowSystem - from .types import Data, Period, Scenario, Time + from .types import Data, NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -170,7 +170,7 @@ def __init__( inputs: list[Flow], outputs: list[Flow], on_off_parameters: OnOffParameters | None = None, - conversion_factors: list[dict[str, Data[Time, Scenario]]] | None = None, + conversion_factors: list[dict[str, NumericData[Time, Scenario]]] | None = None, piecewise_conversion: PiecewiseConversion | None = None, meta_data: dict | None = None, ): @@ -387,17 +387,17 @@ def __init__( label: str, charging: Flow, discharging: Flow, - capacity_in_flow_hours: Data[Period, Scenario] | InvestParameters, - relative_minimum_charge_state: Data[Time, Period, Scenario] = 0, - relative_maximum_charge_state: Data[Time, Period, Scenario] = 1, - initial_charge_state: Data[Period, Scenario] | Literal['lastValueOfSim'] = 0, - minimal_final_charge_state: Data[Period, Scenario] | None = None, - maximal_final_charge_state: Data[Period, Scenario] | None = None, - relative_minimum_final_charge_state: Data[Period, Scenario] | None = None, - relative_maximum_final_charge_state: Data[Period, Scenario] | None = None, - eta_charge: Data[Time, Period, Scenario] = 1, - eta_discharge: Data[Time, Period, Scenario] = 1, - relative_loss_per_hour: Data[Time, Period, Scenario] = 0, + capacity_in_flow_hours: NumericData[Period, Scenario] | InvestParameters, + relative_minimum_charge_state: NumericData[Time, Period, Scenario] = 0, + relative_maximum_charge_state: NumericData[Time, Period, Scenario] = 1, + initial_charge_state: NumericData[Period, Scenario] | Literal['lastValueOfSim'] = 0, + minimal_final_charge_state: NumericData[Period, Scenario] | None = None, + maximal_final_charge_state: NumericData[Period, 
Scenario] | None = None, + relative_minimum_final_charge_state: NumericData[Period, Scenario] | None = None, + relative_maximum_final_charge_state: NumericData[Period, Scenario] | None = None, + eta_charge: NumericData[Time, Period, Scenario] = 1, + eta_discharge: NumericData[Time, Period, Scenario] = 1, + relative_loss_per_hour: NumericData[Time, Period, Scenario] = 0, prevent_simultaneous_charge_and_discharge: bool = True, balanced: bool = False, meta_data: dict | None = None, @@ -664,8 +664,8 @@ def __init__( out1: Flow, in2: Flow | None = None, out2: Flow | None = None, - relative_losses: Data[Time, Period, Scenario] | None = None, - absolute_losses: Data[Time, Period, Scenario] | None = None, + relative_losses: NumericData[Time, Period, Scenario] | None = None, + absolute_losses: NumericData[Time, Period, Scenario] | None = None, on_off_parameters: OnOffParameters = None, prevent_simultaneous_flows_in_both_directions: bool = True, balanced: bool = False, diff --git a/flixopt/core.py b/flixopt/core.py index 1b8e1a660..cba519223 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -12,21 +12,21 @@ import pandas as pd import xarray as xr -from flixopt.types import Data, Period, Scalar, Scenario, Time +from flixopt.types import Data, NumericData, Period, Scalar, Scenario, Time logger = logging.getLogger('flixopt') # Legacy type aliases (kept for backward compatibility) -# These are being replaced by dimension-aware Data[...] types +# These are being replaced by dimension-aware NumericData[...] types Scalar = Scalar """A single number, either integer or float.""" -PeriodicDataUser = Data[Period, Scenario] +PeriodicDataUser = NumericData[Period, Scenario] """ User data which has no time dimension. Internally converted to a Scalar or an xr.DataArray without a time dimension. .. 
deprecated:: - Use dimension-aware types instead: `Data[Period, Scenario]` or `Data[Scenario]` + Use dimension-aware types instead: `NumericData[Period, Scenario]` or `NumericData[Scenario]` """ PeriodicData = xr.DataArray @@ -166,7 +166,7 @@ def agg_weight(self): User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension. .. deprecated:: - Use dimension-aware types instead: `Data[Time]`, `Data[Time, Scenario]`, or `Data[Time, Period, Scenario]` + Use dimension-aware types instead: `NumericData[Time]`, `NumericData[Time, Scenario]`, or `NumericData[Time, Period, Scenario]` """ TemporalData = xr.DataArray | TimeSeriesData diff --git a/flixopt/effects.py b/flixopt/effects.py index 9c388fe2c..20e970dda 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -20,7 +20,7 @@ from .core import PeriodicDataUser, Scalar, TemporalData, TemporalDataUser from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io -from .types import Data, Period, Scenario, Time +from .types import Data, EffectData, NumericData, Period, Scenario, Time if TYPE_CHECKING: from collections.abc import Iterator @@ -53,17 +53,27 @@ class Effect(Element): is_objective: If True, this effect serves as the optimization objective function. Only one effect can be marked as objective per optimization. share_from_temporal: Temporal cross-effect contributions. - Maps temporal contributions from other effects to this effect + Maps temporal contributions from other effects to this effect. + Type: `TemporalEffectsUser` (single value or dict with dimensions [Time, Period, Scenario]) share_from_periodic: Periodic cross-effect contributions. Maps periodic contributions from other effects to this effect. + Type: `PeriodicEffectsUser` (single value or dict with dimensions [Period, Scenario]) minimum_temporal: Minimum allowed total contribution across all timesteps. 
+ Type: `NumericData[Period, Scenario]` (sum over time, can vary by period/scenario) maximum_temporal: Maximum allowed total contribution across all timesteps. + Type: `NumericData[Period, Scenario]` (sum over time, can vary by period/scenario) minimum_per_hour: Minimum allowed contribution per hour. + Type: `NumericData[Time, Period, Scenario]` (per-timestep constraint, can vary by period) maximum_per_hour: Maximum allowed contribution per hour. + Type: `NumericData[Time, Period, Scenario]` (per-timestep constraint, can vary by period) minimum_periodic: Minimum allowed total periodic contribution. + Type: `NumericData[Period, Scenario]` (periodic constraint) maximum_periodic: Maximum allowed total periodic contribution. + Type: `NumericData[Period, Scenario]` (periodic constraint) minimum_total: Minimum allowed total effect (temporal + periodic combined). + Type: `NumericData[Period, Scenario]` (total constraint per period) maximum_total: Maximum allowed total effect (temporal + periodic combined). + Type: `NumericData[Period, Scenario]` (total constraint per period) meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -173,14 +183,14 @@ def __init__( is_objective: bool = False, share_from_temporal: TemporalEffectsUser | None = None, share_from_periodic: PeriodicEffectsUser | None = None, - minimum_temporal: PeriodicEffectsUser | None = None, - maximum_temporal: PeriodicEffectsUser | None = None, - minimum_periodic: PeriodicEffectsUser | None = None, - maximum_periodic: PeriodicEffectsUser | None = None, - minimum_per_hour: TemporalDataUser | None = None, - maximum_per_hour: TemporalDataUser | None = None, - minimum_total: Scalar | None = None, - maximum_total: Scalar | None = None, + minimum_temporal: NumericData[Period, Scenario] | None = None, + maximum_temporal: NumericData[Period, Scenario] | None = None, + minimum_periodic: NumericData[Period, Scenario] | None = None, + maximum_periodic: NumericData[Period, Scenario] | None = None, + minimum_per_hour: NumericData[Time, Period, Scenario] | None = None, + maximum_per_hour: NumericData[Time, Period, Scenario] | None = None, + minimum_total: NumericData[Period, Scenario] | None = None, + maximum_total: NumericData[Period, Scenario] | None = None, **kwargs, ): super().__init__(label, meta_data=meta_data) @@ -437,22 +447,64 @@ def _do_modeling(self): ) -TemporalEffectsUser = Data[Time, Period, Scenario] | dict[str, Data[Time, Scenario]] # User-specified Shares to Effects +TemporalEffectsUser = NumericData[Time, Period, Scenario] | dict[str, NumericData[Time, Period, Scenario]] """ -This datatype is used to define a temporal share to an effect by a certain attribute. +Temporal effects data: numeric values that can vary with time, periods, and scenarios. 
Can be: -- A single value (scalar, array, Series, DataFrame, DataArray) with at most [Time, Scenario] dimensions -- A dictionary mapping effect names to values with at most [Time, Scenario] dimensions +- A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Time, Period, Scenario] dimensions + → Applied to the standard effect +- A dictionary mapping effect names to numeric values with at most [Time, Period, Scenario] dimensions + → Applied to named effects (e.g., {'costs': 10, 'CO2': 0.5}) + +Dimensions: +- Time: Hourly/timestep variation (e.g., varying electricity prices) +- Period: Multi-period planning horizon (e.g., costs in different years) +- Scenario: Scenario-based variation (e.g., high/low price scenarios) + +Note: Data can have any subset of these dimensions - scalars, 1D, 2D, or 3D arrays. + +Examples: + >>> # Single value for standard effect (broadcast to all dimensions) + >>> effects_per_flow_hour = 10.5 + >>> + >>> # Time-varying costs (same across periods and scenarios) + >>> effects_per_flow_hour = np.array([10, 12, 11, 10]) + >>> + >>> # Multiple effects with different dimensions + >>> effects_per_flow_hour = { + ... 'costs': 10.5, # Scalar + ... 'CO2': np.array([0.3, 0.4, 0.3]), # Time-varying + ... } """ -PeriodicEffectsUser = Data[Period, Scenario] | dict[str, Data[Period, Scenario]] # User-specified Shares to Effects +PeriodicEffectsUser = NumericData[Period, Scenario] | dict[str, NumericData[Period, Scenario]] """ -This datatype is used to define a periodic share to an effect by a certain attribute. +Periodic effects data: numeric values that can vary with planning periods and scenarios (no time dimension). 
Can be: -- A single value (scalar, array, Series, DataFrame, DataArray) with at most [Period, Scenario] dimensions -- A dictionary mapping effect names to values with at most [Period, Scenario] dimensions +- A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Period, Scenario] dimensions + → Applied to the standard effect +- A dictionary mapping effect names to numeric values with at most [Period, Scenario] dimensions + → Applied to named effects (e.g., {'costs': 1000, 'CO2': 50}) + +Typical uses: +- Investment costs (vary by period but not time) +- Fixed operating costs (per period) +- Retirement effects + +Examples: + >>> # Fixed cost for investment + >>> effects_of_investment = 1000 + >>> + >>> # Period-varying costs (e.g., different years) + >>> effects_of_investment = np.array([1000, 1200, 1100]) # Years 2020, 2025, 2030 + >>> + >>> # Multiple periodic effects + >>> effects_of_investment = { + ... 'costs': 1000, + ... 'CO2': 50, + ... } """ TemporalEffects = dict[str, TemporalData] # User-specified Shares to Effects diff --git a/flixopt/elements.py b/flixopt/elements.py index aa18a67ab..d9282ddc0 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -24,7 +24,7 @@ from .effects import TemporalEffectsUser from .flow_system import FlowSystem - from .types import Data, Period, Scenario, Time + from .types import Data, NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -420,17 +420,17 @@ def __init__( self, label: str, bus: str, - size: Scalar | InvestParameters = None, - fixed_relative_profile: Data[Time, Period, Scenario] | None = None, - relative_minimum: Data[Time, Period, Scenario] = 0, - relative_maximum: Data[Time, Period, Scenario] = 1, + size: NumericData[Period, Scenario] | InvestParameters = None, + fixed_relative_profile: NumericData[Time, Period, Scenario] | None = None, + relative_minimum: NumericData[Time, Period, Scenario] = 0, + relative_maximum: NumericData[Time, Period, Scenario] = 
1, effects_per_flow_hour: TemporalEffectsUser | None = None, on_off_parameters: OnOffParameters | None = None, - flow_hours_total_max: Scalar | None = None, - flow_hours_total_min: Scalar | None = None, - load_factor_min: Scalar | None = None, - load_factor_max: Scalar | None = None, - previous_flow_rate: Scalar | list[Scalar] | None = None, + flow_hours_total_max: NumericData[Period, Scenario] | None = None, + flow_hours_total_min: NumericData[Period, Scenario] | None = None, + load_factor_min: NumericData[Period, Scenario] | None = None, + load_factor_max: NumericData[Period, Scenario] | None = None, + previous_flow_rate: NumericData[Period, Scenario] | list[Scalar] | None = None, meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) diff --git a/flixopt/features.py b/flixopt/features.py index 0d1fc7784..e6d400556 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -17,6 +17,7 @@ if TYPE_CHECKING: from .core import FlowSystemDimensions, Scalar, TemporalData from .interface import InvestParameters, OnOffParameters, Piecewise + from .types import NumericData, Period, Scenario logger = logging.getLogger('flixopt') @@ -517,8 +518,8 @@ def __init__( dims: list[FlowSystemDimensions], label_of_element: str | None = None, label_of_model: str | None = None, - total_max: Scalar | None = None, - total_min: Scalar | None = None, + total_max: NumericData[Period, Scenario] | None = None, + total_min: NumericData[Period, Scenario] | None = None, max_per_hour: TemporalData | None = None, min_per_hour: TemporalData | None = None, ): diff --git a/flixopt/interface.py b/flixopt/interface.py index 1ff30b933..3f72f8122 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -22,7 +22,7 @@ from .core import PeriodicData, PeriodicDataUser, Scalar, TemporalDataUser from .effects import PeriodicEffectsUser, TemporalEffectsUser from .flow_system import FlowSystem - from .types import Data, Period, Scenario, Time + from .types import Data, 
NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -74,7 +74,7 @@ class Piece(Interface): """ - def __init__(self, start: Data[Time, Period, Scenario], end: Data[Time, Period, Scenario]): + def __init__(self, start: NumericData[Time, Period, Scenario], end: NumericData[Time, Period, Scenario]): self.start = start self.end = end self.has_time_dim = False @@ -875,15 +875,15 @@ class InvestParameters(Interface): def __init__( self, - fixed_size: Data[Period, Scenario] | None = None, - minimum_size: Data[Period, Scenario] | None = None, - maximum_size: Data[Period, Scenario] | None = None, + fixed_size: NumericData[Period, Scenario] | None = None, + minimum_size: NumericData[Period, Scenario] | None = None, + maximum_size: NumericData[Period, Scenario] | None = None, mandatory: bool = False, effects_of_investment: PeriodicEffectsUser | None = None, effects_of_investment_per_size: PeriodicEffectsUser | None = None, effects_of_retirement: PeriodicEffectsUser | None = None, piecewise_effects_of_investment: PiecewiseEffects | None = None, - linked_periods: Data[Period, Scenario] | tuple[int, int] | None = None, + linked_periods: NumericData[Period, Scenario] | tuple[int, int] | None = None, **kwargs, ): # Handle deprecated parameters using centralized helper @@ -1273,10 +1273,10 @@ def __init__( effects_per_running_hour: TemporalEffectsUser | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, - consecutive_on_hours_min: Data[Time, Period, Scenario] | None = None, - consecutive_on_hours_max: Data[Time, Period, Scenario] | None = None, - consecutive_off_hours_min: Data[Time, Period, Scenario] | None = None, - consecutive_off_hours_max: Data[Time, Period, Scenario] | None = None, + consecutive_on_hours_min: NumericData[Time, Period, Scenario] | None = None, + consecutive_on_hours_max: NumericData[Time, Period, Scenario] | None = None, + consecutive_off_hours_min: NumericData[Time, Period, Scenario] | None = 
None, + consecutive_off_hours_max: NumericData[Time, Period, Scenario] | None = None, switch_on_total_max: int | None = None, force_switch_on: bool = False, ): @@ -1286,13 +1286,13 @@ def __init__( self.effects_per_running_hour: TemporalEffectsUser = ( effects_per_running_hour if effects_per_running_hour is not None else {} ) - self.on_hours_total_min: Scalar = on_hours_total_min - self.on_hours_total_max: Scalar = on_hours_total_max + self.on_hours_total_min: NumericData[Period, Scenario] = on_hours_total_min + self.on_hours_total_max: NumericData[Period, Scenario] = on_hours_total_max self.consecutive_on_hours_min: TemporalDataUser = consecutive_on_hours_min self.consecutive_on_hours_max: TemporalDataUser = consecutive_on_hours_max self.consecutive_off_hours_min: TemporalDataUser = consecutive_off_hours_min self.consecutive_off_hours_max: TemporalDataUser = consecutive_off_hours_max - self.switch_on_total_max: Scalar = switch_on_total_max + self.switch_on_total_max: NumericData[Period, Scenario] = switch_on_total_max self.force_switch_on: bool = force_switch_on def transform_data(self, flow_system: FlowSystem, name_prefix: str = '') -> None: diff --git a/flixopt/types.py b/flixopt/types.py index 58bd61cd9..2f3a2754d 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -8,26 +8,26 @@ Key Concepts ------------ - Dimension markers (`Time`, `Period`, `Scenario`) represent the possible dimensions -- `Data[...]` generic type indicates the **maximum** dimensions data can have +- `NumericData[...]` generic type indicates the **maximum** dimensions data can have - Data can have any subset of the specified dimensions (including being scalar) - All standard input formats are supported (scalar, array, Series, DataFrame, DataArray) Examples -------- -Type hint `Data[Time]` accepts: +Type hint `NumericData[Time]` accepts: - Scalar: `0.5` (broadcast to all timesteps) - 1D array: `np.array([1, 2, 3])` (matched to time dimension) - pandas Series: with DatetimeIndex matching 
flow system - xarray DataArray: with 'time' dimension -Type hint `Data[Time, Scenario]` accepts: +Type hint `NumericData[Time, Scenario]` accepts: - Scalar: `100` (broadcast to all time and scenario combinations) - 1D array: matched to time OR scenario dimension - 2D array: matched to both dimensions - pandas DataFrame: columns as scenarios, index as time - xarray DataArray: with any subset of 'time', 'scenario' dimensions -Type hint `Data[Period, Scenario]` (periodic data, no time): +Type hint `NumericData[Period, Scenario]` (periodic data, no time): - Used for investment parameters that vary by planning period - Accepts scalars, arrays matching periods/scenarios, or DataArrays @@ -36,7 +36,7 @@ - Not converted to DataArray, stays as scalar """ -from typing import Any, TypeAlias +from typing import Any, TypeAlias, Union import numpy as np import pandas as pd @@ -63,15 +63,15 @@ class Scenario: class _NumericDataMeta(type): - """Metaclass for Data to enable subscript notation Data[Time, Scenario] for numeric data.""" + """Metaclass for Data to enable subscript notation NumericData[Time, Scenario] for numeric data.""" def __getitem__(cls, dimensions): """ Create a type hint showing maximum dimensions for numeric data. The dimensions parameter can be: - - A single dimension: Data[Time] - - Multiple dimensions: Data[Time, Period, Scenario] + - A single dimension: NumericData[Time] + - Multiple dimensions: NumericData[Time, Period, Scenario] The type hint communicates that data can have **at most** these dimensions. Actual data can be: @@ -86,8 +86,9 @@ def __getitem__(cls, dimensions): # of which dimensions are specified. The dimension parameters serve # as documentation rather than runtime validation. 
- # Return type that includes all possible numeric input formats - return int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray + # Return Union[] for better type checker compatibility (especially with | None) + # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax + return Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 class _BoolDataMeta(type): @@ -99,8 +100,24 @@ def __getitem__(cls, dimensions): Same semantics as numeric Data, but for boolean values. """ - # Return type that includes all possible boolean input formats - return bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray + # Return Union[] for better type checker compatibility (especially with | None) + # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax + return Union[bool, np.bool_, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 + + +class _EffectDataMeta(type): + """Metaclass for EffectData to enable subscript notation EffectData[Time, Period, Scenario] for effect data.""" + + def __getitem__(cls, dimensions): + """ + Create a type hint showing maximum dimensions for effect data. + + Effect data is numeric data specifically for effects, with full dimensional support. + Same as NumericData but semantically distinct for effect-related parameters. + """ + # Return Union[] for better type checker compatibility (especially with | None) + # Using Union[] instead of | to avoid IDE warnings with "Type[...] 
| None" syntax + return Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 class Data(metaclass=_NumericDataMeta): @@ -222,7 +239,80 @@ def __init__(self): raise TypeError('BoolData is a type hint only and cannot be instantiated') -# Public alias for Data (for clarity and symmetry with BoolData) +class EffectData(metaclass=_EffectDataMeta): + """ + Generic type for effect data that can have various dimensions. + + EffectData is semantically identical to NumericData but specifically intended for + effect-related parameters. It supports the full dimensional space including Time, + Period, and Scenario dimensions, making it ideal for effect contributions, constraints, + and cross-effect relationships. + + Use subscript notation to specify the maximum dimensions: + - `EffectData[Time]`: Time-varying effect data + - `EffectData[Period, Scenario]`: Periodic effect data + - `EffectData[Time, Period, Scenario]`: Full dimensional effect data + + Semantics: "At Most" Dimensions + -------------------------------- + When you see `EffectData[Time, Period, Scenario]`, it means the data can have: + - No dimensions (scalar): broadcast to all time, period, and scenario values + - Any subset: just time, just period, just scenario, time+period, etc. 
+ - All dimensions: full 3D data + + Accepted Input Formats (Numeric) + --------------------------------- + All dimension combinations accept these formats: + - Scalars: int, float (including numpy types) + - Arrays: numpy ndarray with numeric dtype (matched by length/shape to dimensions) + - pandas Series: matched by index to dimension coordinates + - pandas DataFrame: typically columns=scenarios/periods, index=time + - xarray DataArray: used directly with dimension validation + + Typical Use Cases + ----------------- + - Effect contributions varying by time, period, and scenario + - Per-hour constraints that tighten over planning periods + - Cross-effect pricing (e.g., escalating carbon prices) + - Multi-period optimization with temporal detail + + Examples + -------- + >>> # Scalar effect cost (broadcast to all dimensions) + >>> cost: EffectData[Time, Period, Scenario] = 10.5 + >>> + >>> # Time-varying emissions + >>> emissions: EffectData[Time, Period, Scenario] = np.array([100, 120, 110]) + >>> + >>> # Period-varying carbon price (escalating over years) + >>> carbon_price: EffectData[Period] = np.array([0.1, 0.2, 0.3]) # €/kg in 2020, 2025, 2030 + >>> + >>> # Full 3D effect data + >>> import xarray as xr + >>> full_data: EffectData[Time, Period, Scenario] = xr.DataArray( + ... data=np.random.rand(24, 3, 2), # 24 hours × 3 periods × 2 scenarios + ... dims=['time', 'period', 'scenario'], + ... ) + + Note + ---- + EffectData is functionally identical to NumericData. The distinction is semantic: + use EffectData for effect-related parameters to make code intent clearer. 
+ + See Also + -------- + NumericData : General numeric data with dimensions + BoolData : For boolean data with dimensions + TemporalEffectsUser : Effect type for temporal contributions (dict or single value) + PeriodicEffectsUser : Effect type for periodic contributions (dict or single value) + """ + + # This class is not meant to be instantiated, only used for type hints + def __init__(self): + raise TypeError('EffectData is a type hint only and cannot be instantiated') + + +# Public alias for Data (for clarity and symmetry with BoolData and EffectData) NumericData = Data """Public type for numeric data with dimensions. Alias for the internal `Data` class.""" @@ -233,6 +323,7 @@ def __init__(self): __all__ = [ 'NumericData', # Primary public type for numeric data 'BoolData', # Primary public type for boolean data + 'EffectData', # Primary public type for effect data (semantic variant of NumericData) 'Data', # Also exported (internal base class, can be used as shorthand) 'Time', 'Period', From d8bf7f22126ea8f63db29bd2a874c1c94c13e53c Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 20:56:15 +0100 Subject: [PATCH 14/35] EffectData Type - Complete Redesign --- flixopt/components.py | 10 ++--- flixopt/core.py | 37 +++++----------- flixopt/effects.py | 12 +++-- flixopt/elements.py | 2 +- flixopt/flow_system.py | 12 +++-- flixopt/interface.py | 13 +++--- flixopt/linear_converters.py | 23 +++++----- flixopt/types.py | 85 ++++++++++++++++++++++++------------ 8 files changed, 104 insertions(+), 90 deletions(-) diff --git a/flixopt/components.py b/flixopt/components.py index 2c0239559..fd060283c 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -414,8 +414,8 @@ def __init__( self.charging = charging self.discharging = discharging self.capacity_in_flow_hours = capacity_in_flow_hours - self.relative_minimum_charge_state: TemporalDataUser = relative_minimum_charge_state - 
self.relative_maximum_charge_state: TemporalDataUser = relative_maximum_charge_state + self.relative_minimum_charge_state: NumericData[Time, Period, Scenario] = relative_minimum_charge_state + self.relative_maximum_charge_state: NumericData[Time, Period, Scenario] = relative_maximum_charge_state self.relative_minimum_final_charge_state = relative_minimum_final_charge_state self.relative_maximum_final_charge_state = relative_maximum_final_charge_state @@ -424,9 +424,9 @@ def __init__( self.minimal_final_charge_state = minimal_final_charge_state self.maximal_final_charge_state = maximal_final_charge_state - self.eta_charge: TemporalDataUser = eta_charge - self.eta_discharge: TemporalDataUser = eta_discharge - self.relative_loss_per_hour: TemporalDataUser = relative_loss_per_hour + self.eta_charge: NumericData[Time, Period, Scenario] = eta_charge + self.eta_discharge: NumericData[Time, Period, Scenario] = eta_discharge + self.relative_loss_per_hour: NumericData[Time, Period, Scenario] = relative_loss_per_hour self.prevent_simultaneous_charge_and_discharge = prevent_simultaneous_charge_and_discharge self.balanced = balanced diff --git a/flixopt/core.py b/flixopt/core.py index cba519223..8f9cc8827 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -16,22 +16,6 @@ logger = logging.getLogger('flixopt') -# Legacy type aliases (kept for backward compatibility) -# These are being replaced by dimension-aware NumericData[...] types -Scalar = Scalar -"""A single number, either integer or float.""" - -PeriodicDataUser = NumericData[Period, Scenario] -""" -User data which has no time dimension. Internally converted to a Scalar or an xr.DataArray without a time dimension. - -.. 
deprecated:: - Use dimension-aware types instead: `NumericData[Period, Scenario]` or `NumericData[Scenario]` -""" - -PeriodicData = xr.DataArray -"""Internally used datatypes for periodic data.""" - FlowSystemDimensions = Literal['time', 'period', 'scenario'] """Possible dimensions of a FlowSystem.""" @@ -159,14 +143,19 @@ def agg_weight(self): return self.aggregation_weight -TemporalDataUser = ( - int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray | TimeSeriesData -) +TemporalDataUser = NumericData[Time, Scenario] """ User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension. -.. deprecated:: - Use dimension-aware types instead: `NumericData[Time]`, `NumericData[Time, Scenario]`, or `NumericData[Time, Period, Scenario]` +Supports data with at most [Time, Scenario] dimensions. For periodic data (no time dimension), use PeriodicDataUser. +For data with all three dimensions [Time, Period, Scenario], use NumericData[Time, Period, Scenario] directly. +""" + +PeriodicDataUser = NumericData[Period, Scenario] +""" +User data for periodic parameters (no time dimension). Internally converted to an xr.DataArray. + +Supports data with at most [Period, Scenario] dimensions. For temporal data (with time), use TemporalDataUser. """ TemporalData = xr.DataArray | TimeSeriesData @@ -651,9 +640,3 @@ def drop_constant_arrays(ds: xr.Dataset, dim: str = 'time', drop_arrays_without_ ) return ds.drop_vars(drop_vars) - - -# Backward compatibility aliases -# TODO: Needed? -NonTemporalDataUser = PeriodicDataUser -NonTemporalData = PeriodicData diff --git a/flixopt/effects.py b/flixopt/effects.py index 20e970dda..a58ce94e3 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -17,7 +17,7 @@ import xarray as xr from . 
import io as fx_io -from .core import PeriodicDataUser, Scalar, TemporalData, TemporalDataUser +from .core import Scalar, TemporalData, TemporalDataUser from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io from .types import Data, EffectData, NumericData, Period, Scenario, Time @@ -447,10 +447,12 @@ def _do_modeling(self): ) -TemporalEffectsUser = NumericData[Time, Period, Scenario] | dict[str, NumericData[Time, Period, Scenario]] +TemporalEffectsUser = EffectData[Time, Period, Scenario] """ Temporal effects data: numeric values that can vary with time, periods, and scenarios. +Type: `EffectData[Time, Period, Scenario]` = `NumericData[Time, Period, Scenario] | dict[str, NumericData[Time, Period, Scenario]]` + Can be: - A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Time, Period, Scenario] dimensions → Applied to the standard effect @@ -478,10 +480,12 @@ def _do_modeling(self): ... } """ -PeriodicEffectsUser = NumericData[Period, Scenario] | dict[str, NumericData[Period, Scenario]] +PeriodicEffectsUser = EffectData[Period, Scenario] """ Periodic effects data: numeric values that can vary with planning periods and scenarios (no time dimension). +Type: `EffectData[Period, Scenario]` = `NumericData[Period, Scenario] | dict[str, NumericData[Period, Scenario]]` + Can be: - A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Period, Scenario] dimensions → Applied to the standard effect @@ -556,7 +560,7 @@ def add_effects(self, *effects: Effect) -> None: def create_effect_values_dict( self, effect_values_user: PeriodicEffectsUser | TemporalEffectsUser - ) -> dict[str, Scalar | TemporalDataUser] | None: + ) -> dict[str, Scalar | NumericData[Time, Period, Scenario]] | None: """Converts effect values into a dictionary. If a scalar is provided, it is associated with a default effect type. 
Examples: diff --git a/flixopt/elements.py b/flixopt/elements.py index d9282ddc0..16cc2513e 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -229,7 +229,7 @@ class Bus(Element): def __init__( self, label: str, - excess_penalty_per_flow_hour: TemporalDataUser | None = 1e5, + excess_penalty_per_flow_hour: NumericData[Time, Period, Scenario] | None = 1e5, meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 1fc280226..250f3fc20 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -20,10 +20,6 @@ ConversionError, DataConverter, FlowSystemDimensions, - PeriodicData, - PeriodicDataUser, - TemporalData, - TemporalDataUser, TimeSeriesData, ) from .effects import ( @@ -43,6 +39,8 @@ import pyvis + from .types import Data, Period, Scenario, Time + logger = logging.getLogger('flixopt') @@ -168,7 +166,7 @@ def __init__( scenarios: pd.Index | None = None, hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, - weights: PeriodicDataUser | None = None, + weights: Data[Period, Scenario] | None = None, scenario_independent_sizes: bool | list[str] = True, scenario_independent_flow_rates: bool | list[str] = False, ): @@ -532,9 +530,9 @@ def to_json(self, path: str | pathlib.Path): def fit_to_model_coords( self, name: str, - data: TemporalDataUser | PeriodicDataUser | None, + data: Data[Time, Period, Scenario] | None, dims: Collection[FlowSystemDimensions] | None = None, - ) -> TemporalData | PeriodicData | None: + ) -> xr.DataArray | None: """ Fit data to model coordinate system (currently time, but extensible). 
diff --git a/flixopt/interface.py b/flixopt/interface.py index 3f72f8122..5cc2d2683 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -19,7 +19,6 @@ if TYPE_CHECKING: # for type checking and preventing circular imports from collections.abc import Iterator - from .core import PeriodicData, PeriodicDataUser, Scalar, TemporalDataUser from .effects import PeriodicEffectsUser, TemporalEffectsUser from .flow_system import FlowSystem from .types import Data, NumericData, Period, Scenario, Time @@ -1045,11 +1044,11 @@ def piecewise_effects(self) -> PiecewiseEffects | None: return self.piecewise_effects_of_investment @property - def minimum_or_fixed_size(self) -> PeriodicData: + def minimum_or_fixed_size(self) -> Data[Period, Scenario]: return self.fixed_size if self.fixed_size is not None else self.minimum_size @property - def maximum_or_fixed_size(self) -> PeriodicData: + def maximum_or_fixed_size(self) -> Data[Period, Scenario]: return self.fixed_size if self.fixed_size is not None else self.maximum_size def format_for_repr(self) -> str: @@ -1288,10 +1287,10 @@ def __init__( ) self.on_hours_total_min: NumericData[Period, Scenario] = on_hours_total_min self.on_hours_total_max: NumericData[Period, Scenario] = on_hours_total_max - self.consecutive_on_hours_min: TemporalDataUser = consecutive_on_hours_min - self.consecutive_on_hours_max: TemporalDataUser = consecutive_on_hours_max - self.consecutive_off_hours_min: TemporalDataUser = consecutive_off_hours_min - self.consecutive_off_hours_max: TemporalDataUser = consecutive_off_hours_max + self.consecutive_on_hours_min: NumericData[Time, Period, Scenario] = consecutive_on_hours_min + self.consecutive_on_hours_max: NumericData[Time, Period, Scenario] = consecutive_on_hours_max + self.consecutive_off_hours_min: NumericData[Time, Period, Scenario] = consecutive_off_hours_min + self.consecutive_off_hours_max: NumericData[Time, Period, Scenario] = consecutive_off_hours_max self.switch_on_total_max: NumericData[Period, 
Scenario] = switch_on_total_max self.force_switch_on: bool = force_switch_on diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 47c545506..661cdc030 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -10,12 +10,13 @@ import numpy as np from .components import LinearConverter -from .core import TemporalDataUser, TimeSeriesData +from .core import TimeSeriesData from .structure import register_class_for_io if TYPE_CHECKING: from .elements import Flow from .interface import OnOffParameters + from .types import NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -76,7 +77,7 @@ class Boiler(LinearConverter): def __init__( self, label: str, - eta: TemporalDataUser, + eta: NumericData[Time, Period, Scenario], Q_fu: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -163,7 +164,7 @@ class Power2Heat(LinearConverter): def __init__( self, label: str, - eta: TemporalDataUser, + eta: NumericData[Time, Period, Scenario], P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -250,7 +251,7 @@ class HeatPump(LinearConverter): def __init__( self, label: str, - COP: TemporalDataUser, + COP: NumericData[Time, Period, Scenario], P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -339,7 +340,7 @@ class CoolingTower(LinearConverter): def __init__( self, label: str, - specific_electricity_demand: TemporalDataUser, + specific_electricity_demand: NumericData[Time, Period, Scenario], P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -437,8 +438,8 @@ class CHP(LinearConverter): def __init__( self, label: str, - eta_th: TemporalDataUser, - eta_el: TemporalDataUser, + eta_th: NumericData[Time, Period, Scenario], + eta_el: NumericData[Time, Period, Scenario], Q_fu: Flow, P_el: Flow, Q_th: Flow, @@ -551,7 +552,7 @@ class HeatPumpWithSource(LinearConverter): def __init__( self, label: str, - COP: TemporalDataUser, + COP: 
NumericData[Time, Period, Scenario], P_el: Flow, Q_ab: Flow, Q_th: Flow, @@ -589,11 +590,11 @@ def COP(self, value): # noqa: N802 def check_bounds( - value: TemporalDataUser, + value: NumericData[Time, Period, Scenario], parameter_label: str, element_label: str, - lower_bound: TemporalDataUser, - upper_bound: TemporalDataUser, + lower_bound: NumericData[Time, Period, Scenario], + upper_bound: NumericData[Time, Period, Scenario], ) -> None: """ Check if the value is within the bounds. The bounds are exclusive. diff --git a/flixopt/types.py b/flixopt/types.py index 2f3a2754d..5a98cd442 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -112,12 +112,27 @@ def __getitem__(cls, dimensions): """ Create a type hint showing maximum dimensions for effect data. - Effect data is numeric data specifically for effects, with full dimensional support. - Same as NumericData but semantically distinct for effect-related parameters. + Effect data can be either: + - A single numeric value (scalar, array, Series, DataFrame, DataArray) + - A dict with string keys mapping to numeric values + + This matches the pattern used for effects: either a single contribution or + a dictionary of named contributions. """ # Return Union[] for better type checker compatibility (especially with | None) # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax - return Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 + # EffectData = NumericData | dict[str, NumericData] + return Union[ # noqa: UP007 + int, + float, + np.integer, + np.floating, + np.ndarray, + pd.Series, + pd.DataFrame, + xr.DataArray, + dict[str, Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray]], # noqa: UP007 + ] class Data(metaclass=_NumericDataMeta): @@ -241,18 +256,25 @@ def __init__(self): class EffectData(metaclass=_EffectDataMeta): """ - Generic type for effect data that can have various dimensions. 
+ Generic type for effect data that can be a single value or a dictionary of values. - EffectData is semantically identical to NumericData but specifically intended for - effect-related parameters. It supports the full dimensional space including Time, - Period, and Scenario dimensions, making it ideal for effect contributions, constraints, - and cross-effect relationships. + EffectData represents the common pattern for effects: either a single numeric contribution + or a dictionary with string keys mapping to numeric contributions. This is useful for + specifying effects where you either have a single effect or multiple named effects. Use subscript notation to specify the maximum dimensions: - `EffectData[Time]`: Time-varying effect data - `EffectData[Period, Scenario]`: Periodic effect data - `EffectData[Time, Period, Scenario]`: Full dimensional effect data + Type Structure + -------------- + `EffectData[dims]` = `NumericData[dims] | dict[str, NumericData[dims]]` + + This means you can provide: + - A single numeric value (scalar, array, Series, DataFrame, DataArray) + - A dict mapping effect names to numeric values + Semantics: "At Most" Dimensions -------------------------------- When you see `EffectData[Time, Period, Scenario]`, it means the data can have: @@ -260,44 +282,51 @@ class EffectData(metaclass=_EffectDataMeta): - Any subset: just time, just period, just scenario, time+period, etc. 
- All dimensions: full 3D data - Accepted Input Formats (Numeric) - --------------------------------- - All dimension combinations accept these formats: + Accepted Input Formats + ---------------------- + Single value: - Scalars: int, float (including numpy types) - Arrays: numpy ndarray with numeric dtype (matched by length/shape to dimensions) - pandas Series: matched by index to dimension coordinates - pandas DataFrame: typically columns=scenarios/periods, index=time - xarray DataArray: used directly with dimension validation + Dictionary of values: + - dict[str, ] + Typical Use Cases ----------------- - - Effect contributions varying by time, period, and scenario - - Per-hour constraints that tighten over planning periods - - Cross-effect pricing (e.g., escalating carbon prices) - - Multi-period optimization with temporal detail + - Single effect: `EffectData[Time] = 10.5` or `np.array([10, 12, 11])` + - Multiple effects: `EffectData[Time] = {'CO2': 0.5, 'costs': 100}` + - Cross-effect relationships in Effect class + - Component effect contributions (effects_per_flow_hour, etc.) Examples -------- - >>> # Scalar effect cost (broadcast to all dimensions) - >>> cost: EffectData[Time, Period, Scenario] = 10.5 + >>> # Single scalar effect (broadcast to all dimensions) + >>> single_cost: EffectData[Time, Period, Scenario] = 10.5 >>> - >>> # Time-varying emissions - >>> emissions: EffectData[Time, Period, Scenario] = np.array([100, 120, 110]) + >>> # Single time-varying effect + >>> single_emissions: EffectData[Time] = np.array([100, 120, 110]) >>> - >>> # Period-varying carbon price (escalating over years) - >>> carbon_price: EffectData[Period] = np.array([0.1, 0.2, 0.3]) # €/kg in 2020, 2025, 2030 + >>> # Multiple named effects (dict) + >>> multiple_effects: EffectData[Time] = { + ... 'CO2': np.array([0.5, 0.6, 0.5]), + ... 'costs': 100, # scalar broadcast to all time + ... 
} >>> - >>> # Full 3D effect data - >>> import xarray as xr - >>> full_data: EffectData[Time, Period, Scenario] = xr.DataArray( - ... data=np.random.rand(24, 3, 2), # 24 hours × 3 periods × 2 scenarios - ... dims=['time', 'period', 'scenario'], + >>> # Cross-effect in Effect class + >>> cost_effect = Effect( + ... label='total_costs', + ... unit='€', + ... is_objective=True, + ... share_from_temporal={'CO2': 0.1}, # EffectData[Time, Period, Scenario] ... ) Note ---- - EffectData is functionally identical to NumericData. The distinction is semantic: - use EffectData for effect-related parameters to make code intent clearer. + EffectData = NumericData | dict[str, NumericData]. This pattern is specific to effects + and different from NumericData which only represents single numeric values. See Also -------- From bfc645efc2d10037d91f07586a5ba9c8455a42fc Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 21:00:48 +0100 Subject: [PATCH 15/35] Use NumericData instead of Data --- flixopt/__init__.py | 2 +- flixopt/components.py | 2 +- flixopt/core.py | 2 +- flixopt/effects.py | 2 +- flixopt/elements.py | 2 +- flixopt/flow_system.py | 6 +++--- flixopt/interface.py | 6 +++--- flixopt/types.py | 16 +++++----------- 8 files changed, 16 insertions(+), 22 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 47838745a..3984b5394 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -45,7 +45,7 @@ from .effects import PeriodicEffectsUser, TemporalEffectsUser # Type system for dimension-aware type hints -from .types import BoolData, Data, EffectData, NumericData, Period, Scalar, Scenario, Time +from .types import BoolData, EffectData, NumericData, Period, Scalar, Scenario, Time # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. 
diff --git a/flixopt/components.py b/flixopt/components.py index fd060283c..632bb6ee9 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -23,7 +23,7 @@ import linopy from .flow_system import FlowSystem - from .types import Data, NumericData, Period, Scenario, Time + from .types import NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') diff --git a/flixopt/core.py b/flixopt/core.py index 8f9cc8827..8ec91e2e2 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -12,7 +12,7 @@ import pandas as pd import xarray as xr -from flixopt.types import Data, NumericData, Period, Scalar, Scenario, Time +from flixopt.types import NumericData, Period, Scalar, Scenario, Time logger = logging.getLogger('flixopt') diff --git a/flixopt/effects.py b/flixopt/effects.py index a58ce94e3..6aeb98451 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -20,7 +20,7 @@ from .core import Scalar, TemporalData, TemporalDataUser from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io -from .types import Data, EffectData, NumericData, Period, Scenario, Time +from .types import EffectData, NumericData, Period, Scenario, Time if TYPE_CHECKING: from collections.abc import Iterator diff --git a/flixopt/elements.py b/flixopt/elements.py index 16cc2513e..e97169f0a 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -24,7 +24,7 @@ from .effects import TemporalEffectsUser from .flow_system import FlowSystem - from .types import Data, NumericData, Period, Scenario, Time + from .types import NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 250f3fc20..9c195d926 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -39,7 +39,7 @@ import pyvis - from .types import Data, Period, Scenario, Time + from .types import BoolData, NumericData, Period, Scenario, 
Time logger = logging.getLogger('flixopt') @@ -166,7 +166,7 @@ def __init__( scenarios: pd.Index | None = None, hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, - weights: Data[Period, Scenario] | None = None, + weights: NumericData[Period, Scenario] | None = None, scenario_independent_sizes: bool | list[str] = True, scenario_independent_flow_rates: bool | list[str] = False, ): @@ -530,7 +530,7 @@ def to_json(self, path: str | pathlib.Path): def fit_to_model_coords( self, name: str, - data: Data[Time, Period, Scenario] | None, + data: NumericData[Time, Period, Scenario] | BoolData[Time, Period, Scenario] | None, dims: Collection[FlowSystemDimensions] | None = None, ) -> xr.DataArray | None: """ diff --git a/flixopt/interface.py b/flixopt/interface.py index 5cc2d2683..541357e48 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -21,7 +21,7 @@ from .effects import PeriodicEffectsUser, TemporalEffectsUser from .flow_system import FlowSystem - from .types import Data, NumericData, Period, Scenario, Time + from .types import NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -1044,11 +1044,11 @@ def piecewise_effects(self) -> PiecewiseEffects | None: return self.piecewise_effects_of_investment @property - def minimum_or_fixed_size(self) -> Data[Period, Scenario]: + def minimum_or_fixed_size(self) -> NumericData[Period, Scenario]: return self.fixed_size if self.fixed_size is not None else self.minimum_size @property - def maximum_or_fixed_size(self) -> Data[Period, Scenario]: + def maximum_or_fixed_size(self) -> NumericData[Period, Scenario]: return self.fixed_size if self.fixed_size is not None else self.maximum_size def format_for_repr(self) -> str: diff --git a/flixopt/types.py b/flixopt/types.py index 5a98cd442..a7527c835 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -63,7 +63,7 @@ class Scenario: class _NumericDataMeta(type): - """Metaclass for 
Data to enable subscript notation NumericData[Time, Scenario] for numeric data.""" + """Metaclass for NumericData to enable subscript notation NumericData[Time, Scenario] for numeric data.""" def __getitem__(cls, dimensions): """ @@ -98,7 +98,7 @@ def __getitem__(cls, dimensions): """ Create a type hint showing maximum dimensions for boolean data. - Same semantics as numeric Data, but for boolean values. + Same semantics as NumericData, but for boolean values. """ # Return Union[] for better type checker compatibility (especially with | None) # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax @@ -135,7 +135,7 @@ def __getitem__(cls, dimensions): ] -class Data(metaclass=_NumericDataMeta): +class NumericData(metaclass=_NumericDataMeta): """ Base type for numeric data that can have various dimensions. @@ -180,7 +180,6 @@ class Data(metaclass=_NumericDataMeta): See Also -------- - NumericData : Public alias for this class BoolData : For boolean data with dimensions DataConverter.to_dataarray : The conversion implementation FlowSystem.fit_to_model_coords : Fits data to the model's coordinate system @@ -188,7 +187,7 @@ class Data(metaclass=_NumericDataMeta): # This class is not meant to be instantiated, only used for type hints def __init__(self): - raise TypeError('Data is a type hint only and cannot be instantiated') + raise TypeError('NumericData is a type hint only and cannot be instantiated') class BoolData(metaclass=_BoolDataMeta): @@ -202,7 +201,7 @@ class BoolData(metaclass=_BoolDataMeta): Semantics: "At Most" Dimensions -------------------------------- - Same semantics as Data, but for boolean values. + Same semantics as NumericData, but for boolean values. 
When you see `BoolData[Time, Scenario]`, the data can have: - No dimensions (scalar bool): broadcast to all time and scenario values - Just 'time': broadcast across scenarios @@ -341,10 +340,6 @@ def __init__(self): raise TypeError('EffectData is a type hint only and cannot be instantiated') -# Public alias for Data (for clarity and symmetry with BoolData and EffectData) -NumericData = Data -"""Public type for numeric data with dimensions. Alias for the internal `Data` class.""" - # Simple scalar type for dimension-less numeric values Scalar: TypeAlias = int | float | np.integer | np.floating @@ -353,7 +348,6 @@ def __init__(self): 'NumericData', # Primary public type for numeric data 'BoolData', # Primary public type for boolean data 'EffectData', # Primary public type for effect data (semantic variant of NumericData) - 'Data', # Also exported (internal base class, can be used as shorthand) 'Time', 'Period', 'Scenario', From 2a06dfb2346a90de992d02d2a628affb80ada918 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 21:12:56 +0100 Subject: [PATCH 16/35] Update type hints --- flixopt/__init__.py | 3 -- flixopt/core.py | 2 +- flixopt/effects.py | 95 +++++------------------------------------- flixopt/elements.py | 5 +-- flixopt/flow_system.py | 15 ++----- flixopt/interface.py | 39 +++++++---------- flixopt/types.py | 23 ++-------- 7 files changed, 37 insertions(+), 145 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 3984b5394..52560bc85 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -41,9 +41,6 @@ solvers, ) -# Effect-specific types -from .effects import PeriodicEffectsUser, TemporalEffectsUser - # Type system for dimension-aware type hints from .types import BoolData, EffectData, NumericData, Period, Scalar, Scenario, Time diff --git a/flixopt/core.py b/flixopt/core.py index 8ec91e2e2..406a6ea7c 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -12,7 +12,7 @@ import 
pandas as pd import xarray as xr -from flixopt.types import NumericData, Period, Scalar, Scenario, Time +from flixopt.types import NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') diff --git a/flixopt/effects.py b/flixopt/effects.py index 6aeb98451..120158b09 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,16 +16,14 @@ import numpy as np import xarray as xr -from . import io as fx_io -from .core import Scalar, TemporalData, TemporalDataUser from .features import ShareAllocationModel from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io -from .types import EffectData, NumericData, Period, Scenario, Time if TYPE_CHECKING: from collections.abc import Iterator from .flow_system import FlowSystem + from .types import EffectData, NumericData, Period, Scalar, Scenario, Time logger = logging.getLogger('flixopt') @@ -181,8 +179,8 @@ def __init__( meta_data: dict | None = None, is_standard: bool = False, is_objective: bool = False, - share_from_temporal: TemporalEffectsUser | None = None, - share_from_periodic: PeriodicEffectsUser | None = None, + share_from_temporal: EffectData[Time, Period, Scenario] | None = None, + share_from_periodic: EffectData[Period, Scenario] | None = None, minimum_temporal: NumericData[Period, Scenario] | None = None, maximum_temporal: NumericData[Period, Scenario] | None = None, minimum_periodic: NumericData[Period, Scenario] | None = None, @@ -198,8 +196,12 @@ def __init__( self.description = description self.is_standard = is_standard self.is_objective = is_objective - self.share_from_temporal: TemporalEffectsUser = share_from_temporal if share_from_temporal is not None else {} - self.share_from_periodic: PeriodicEffectsUser = share_from_periodic if share_from_periodic is not None else {} + self.share_from_temporal: EffectData[Time, Period, Scenario] = ( + share_from_temporal if share_from_temporal is not None else {} + ) + 
self.share_from_periodic: EffectData[Period, Scenario] = ( + share_from_periodic if share_from_periodic is not None else {} + ) # Handle backwards compatibility for deprecated parameters using centralized helper minimum_temporal = self._handle_deprecated_kwarg( @@ -447,76 +449,6 @@ def _do_modeling(self): ) -TemporalEffectsUser = EffectData[Time, Period, Scenario] -""" -Temporal effects data: numeric values that can vary with time, periods, and scenarios. - -Type: `EffectData[Time, Period, Scenario]` = `NumericData[Time, Period, Scenario] | dict[str, NumericData[Time, Period, Scenario]]` - -Can be: -- A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Time, Period, Scenario] dimensions - → Applied to the standard effect -- A dictionary mapping effect names to numeric values with at most [Time, Period, Scenario] dimensions - → Applied to named effects (e.g., {'costs': 10, 'CO2': 0.5}) - -Dimensions: -- Time: Hourly/timestep variation (e.g., varying electricity prices) -- Period: Multi-period planning horizon (e.g., costs in different years) -- Scenario: Scenario-based variation (e.g., high/low price scenarios) - -Note: Data can have any subset of these dimensions - scalars, 1D, 2D, or 3D arrays. - -Examples: - >>> # Single value for standard effect (broadcast to all dimensions) - >>> effects_per_flow_hour = 10.5 - >>> - >>> # Time-varying costs (same across periods and scenarios) - >>> effects_per_flow_hour = np.array([10, 12, 11, 10]) - >>> - >>> # Multiple effects with different dimensions - >>> effects_per_flow_hour = { - ... 'costs': 10.5, # Scalar - ... 'CO2': np.array([0.3, 0.4, 0.3]), # Time-varying - ... } -""" - -PeriodicEffectsUser = EffectData[Period, Scenario] -""" -Periodic effects data: numeric values that can vary with planning periods and scenarios (no time dimension). 
- -Type: `EffectData[Period, Scenario]` = `NumericData[Period, Scenario] | dict[str, NumericData[Period, Scenario]]` - -Can be: -- A single numeric value (scalar, array, Series, DataFrame, DataArray) with at most [Period, Scenario] dimensions - → Applied to the standard effect -- A dictionary mapping effect names to numeric values with at most [Period, Scenario] dimensions - → Applied to named effects (e.g., {'costs': 1000, 'CO2': 50}) - -Typical uses: -- Investment costs (vary by period but not time) -- Fixed operating costs (per period) -- Retirement effects - -Examples: - >>> # Fixed cost for investment - >>> effects_of_investment = 1000 - >>> - >>> # Period-varying costs (e.g., different years) - >>> effects_of_investment = np.array([1000, 1200, 1100]) # Years 2020, 2025, 2030 - >>> - >>> # Multiple periodic effects - >>> effects_of_investment = { - ... 'costs': 1000, - ... 'CO2': 50, - ... } -""" - -TemporalEffects = dict[str, TemporalData] # User-specified Shares to Effects -""" This datatype is used internally to handle temporal shares to an effect. """ - -PeriodicEffects = dict[str, Scalar] -""" This datatype is used internally to handle scalar shares to an effect. """ - EffectExpr = dict[str, linopy.LinearExpression] # Used to create Shares @@ -559,8 +491,8 @@ def add_effects(self, *effects: Effect) -> None: logger.info(f'Registered new Effect: {effect.label}') def create_effect_values_dict( - self, effect_values_user: PeriodicEffectsUser | TemporalEffectsUser - ) -> dict[str, Scalar | NumericData[Time, Period, Scenario]] | None: + self, effect_values_user: EffectData[Time, Period, Scenario] + ) -> dict[str, NumericData[Time, Period, Scenario]] | None: """Converts effect values into a dictionary. If a scalar is provided, it is associated with a default effect type. 
Examples: @@ -920,8 +852,3 @@ def tuples_to_adjacency_list(edges: list[tuple[str, str]]) -> dict[str, list[str graph[target] = [] return graph - - -# Backward compatibility aliases -NonTemporalEffectsUser = PeriodicEffectsUser -NonTemporalEffects = PeriodicEffects diff --git a/flixopt/elements.py b/flixopt/elements.py index e97169f0a..bc7b78b7f 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -22,9 +22,8 @@ if TYPE_CHECKING: import linopy - from .effects import TemporalEffectsUser from .flow_system import FlowSystem - from .types import NumericData, Period, Scenario, Time + from .types import EffectData, NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -424,7 +423,7 @@ def __init__( fixed_relative_profile: NumericData[Time, Period, Scenario] | None = None, relative_minimum: NumericData[Time, Period, Scenario] = 0, relative_maximum: NumericData[Time, Period, Scenario] = 1, - effects_per_flow_hour: TemporalEffectsUser | None = None, + effects_per_flow_hour: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None = None, on_off_parameters: OnOffParameters | None = None, flow_hours_total_max: NumericData[Period, Scenario] | None = None, flow_hours_total_min: NumericData[Period, Scenario] | None = None, diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 9c195d926..aa1cec586 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -22,14 +22,7 @@ FlowSystemDimensions, TimeSeriesData, ) -from .effects import ( - Effect, - EffectCollection, - PeriodicEffects, - PeriodicEffectsUser, - TemporalEffects, - TemporalEffectsUser, -) +from .effects import Effect, EffectCollection from .elements import Bus, Component, Flow from .structure import CompositeContainerMixin, Element, ElementContainer, FlowSystemModel, Interface @@ -39,7 +32,7 @@ import pyvis - from .types import BoolData, NumericData, Period, Scenario, Time + from .types import BoolData, EffectData, NumericData, Period, Scenario, 
Time logger = logging.getLogger('flixopt') @@ -570,11 +563,11 @@ def fit_to_model_coords( def fit_effects_to_model_coords( self, label_prefix: str | None, - effect_values: TemporalEffectsUser | PeriodicEffectsUser | None, + effect_values: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None, label_suffix: str | None = None, dims: Collection[FlowSystemDimensions] | None = None, delimiter: str = '|', - ) -> TemporalEffects | PeriodicEffects | None: + ) -> EffectData[Time, Period, Scenario] | None: """ Transform EffectValues from the user to Internal Datatypes aligned with model coordinates. """ diff --git a/flixopt/interface.py b/flixopt/interface.py index 541357e48..e986bb078 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -19,9 +19,8 @@ if TYPE_CHECKING: # for type checking and preventing circular imports from collections.abc import Iterator - from .effects import PeriodicEffectsUser, TemporalEffectsUser from .flow_system import FlowSystem - from .types import NumericData, Period, Scenario, Time + from .types import EffectData, NumericData, Period, Scenario, Time logger = logging.getLogger('flixopt') @@ -878,9 +877,9 @@ def __init__( minimum_size: NumericData[Period, Scenario] | None = None, maximum_size: NumericData[Period, Scenario] | None = None, mandatory: bool = False, - effects_of_investment: PeriodicEffectsUser | None = None, - effects_of_investment_per_size: PeriodicEffectsUser | None = None, - effects_of_retirement: PeriodicEffectsUser | None = None, + effects_of_investment: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, + effects_of_investment_per_size: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, + effects_of_retirement: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, piecewise_effects_of_investment: PiecewiseEffects | None = None, linked_periods: NumericData[Period, Scenario] | tuple[int, int] | None = None, **kwargs, @@ 
-912,15 +911,11 @@ def __init__( # Validate any remaining unexpected kwargs self._validate_kwargs(kwargs) - self.effects_of_investment: PeriodicEffectsUser = ( - effects_of_investment if effects_of_investment is not None else {} - ) - self.effects_of_retirement: PeriodicEffectsUser = ( - effects_of_retirement if effects_of_retirement is not None else {} - ) + self.effects_of_investment = effects_of_investment if effects_of_investment is not None else {} + self.effects_of_retirement = effects_of_retirement if effects_of_retirement is not None else {} self.fixed_size = fixed_size self.mandatory = mandatory - self.effects_of_investment_per_size: PeriodicEffectsUser = ( + self.effects_of_investment_per_size = ( effects_of_investment_per_size if effects_of_investment_per_size is not None else {} ) self.piecewise_effects_of_investment = piecewise_effects_of_investment @@ -1004,7 +999,7 @@ def optional(self, value: bool): self.mandatory = not value @property - def fix_effects(self) -> PeriodicEffectsUser: + def fix_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scenario]: """Deprecated property. Use effects_of_investment instead.""" warnings.warn( 'The fix_effects property is deprecated. Use effects_of_investment instead.', @@ -1014,7 +1009,7 @@ def fix_effects(self) -> PeriodicEffectsUser: return self.effects_of_investment @property - def specific_effects(self) -> PeriodicEffectsUser: + def specific_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scenario]: """Deprecated property. Use effects_of_investment_per_size instead.""" warnings.warn( 'The specific_effects property is deprecated. Use effects_of_investment_per_size instead.', @@ -1024,7 +1019,7 @@ def specific_effects(self) -> PeriodicEffectsUser: return self.effects_of_investment_per_size @property - def divest_effects(self) -> PeriodicEffectsUser: + def divest_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scenario]: """Deprecated property. 
Use effects_of_retirement instead.""" warnings.warn( 'The divest_effects property is deprecated. Use effects_of_retirement instead.', @@ -1268,8 +1263,10 @@ class OnOffParameters(Interface): def __init__( self, - effects_per_switch_on: TemporalEffectsUser | None = None, - effects_per_running_hour: TemporalEffectsUser | None = None, + effects_per_switch_on: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None = None, + effects_per_running_hour: EffectData[Time, Period, Scenario] + | NumericData[Time, Period, Scenario] + | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, consecutive_on_hours_min: NumericData[Time, Period, Scenario] | None = None, @@ -1279,12 +1276,8 @@ def __init__( switch_on_total_max: int | None = None, force_switch_on: bool = False, ): - self.effects_per_switch_on: TemporalEffectsUser = ( - effects_per_switch_on if effects_per_switch_on is not None else {} - ) - self.effects_per_running_hour: TemporalEffectsUser = ( - effects_per_running_hour if effects_per_running_hour is not None else {} - ) + self.effects_per_switch_on = effects_per_switch_on if effects_per_switch_on is not None else {} + self.effects_per_running_hour = effects_per_running_hour if effects_per_running_hour is not None else {} self.on_hours_total_min: NumericData[Period, Scenario] = on_hours_total_min self.on_hours_total_max: NumericData[Period, Scenario] = on_hours_total_max self.consecutive_on_hours_min: NumericData[Time, Period, Scenario] = consecutive_on_hours_min diff --git a/flixopt/types.py b/flixopt/types.py index a7527c835..5566da117 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -112,27 +112,12 @@ def __getitem__(cls, dimensions): """ Create a type hint showing maximum dimensions for effect data. 
- Effect data can be either: - - A single numeric value (scalar, array, Series, DataFrame, DataArray) - - A dict with string keys mapping to numeric values - - This matches the pattern used for effects: either a single contribution or - a dictionary of named contributions. + Effect data is a dict with string keys mapping to numeric values """ # Return Union[] for better type checker compatibility (especially with | None) # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax - # EffectData = NumericData | dict[str, NumericData] - return Union[ # noqa: UP007 - int, - float, - np.integer, - np.floating, - np.ndarray, - pd.Series, - pd.DataFrame, - xr.DataArray, - dict[str, Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray]], # noqa: UP007 - ] + # EffectData = dict[str, NumericData] + return dict[str, Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray]] # noqa: UP007 class NumericData(metaclass=_NumericDataMeta): @@ -331,8 +316,6 @@ class EffectData(metaclass=_EffectDataMeta): -------- NumericData : General numeric data with dimensions BoolData : For boolean data with dimensions - TemporalEffectsUser : Effect type for temporal contributions (dict or single value) - PeriodicEffectsUser : Effect type for periodic contributions (dict or single value) """ # This class is not meant to be instantiated, only used for type hints From 9e11da8ab186e114cc3d0520c8b4e86335dc61b7 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 21:17:42 +0100 Subject: [PATCH 17/35] Update type hints --- flixopt/calculation.py | 4 ++-- flixopt/elements.py | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/flixopt/calculation.py b/flixopt/calculation.py index 5de2c8870..1125da401 100644 --- a/flixopt/calculation.py +++ b/flixopt/calculation.py @@ -26,7 +26,7 @@ from .aggregation import Aggregation, 
AggregationModel, AggregationParameters from .components import Storage from .config import CONFIG -from .core import DataConverter, Scalar, TimeSeriesData, drop_constant_arrays +from .core import DataConverter, TimeSeriesData, drop_constant_arrays from .features import InvestmentModel from .flow_system import FlowSystem from .results import CalculationResults, SegmentedCalculationResults @@ -103,7 +103,7 @@ def __init__( self._modeled = False @property - def main_results(self) -> dict[str, Scalar | dict]: + def main_results(self) -> dict[str, int | float | dict]: from flixopt.features import InvestmentModel main_results = { diff --git a/flixopt/elements.py b/flixopt/elements.py index bc7b78b7f..c7ff91066 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -13,7 +13,7 @@ from . import io as fx_io from .config import CONFIG -from .core import PlausibilityError, Scalar, TemporalData, TemporalDataUser +from .core import PlausibilityError from .features import InvestmentModel, OnOffModel from .interface import InvestParameters, OnOffParameters from .modeling import BoundingPatterns, ModelingPrimitives, ModelingUtilitiesAbstract @@ -23,7 +23,7 @@ import linopy from .flow_system import FlowSystem - from .types import EffectData, NumericData, Period, Scenario, Time + from .types import EffectData, NumericData, Period, Scalar, Scenario, Time logger = logging.getLogger('flixopt') @@ -429,7 +429,7 @@ def __init__( flow_hours_total_min: NumericData[Period, Scenario] | None = None, load_factor_min: NumericData[Period, Scenario] | None = None, load_factor_max: NumericData[Period, Scenario] | None = None, - previous_flow_rate: NumericData[Period, Scenario] | list[Scalar] | None = None, + previous_flow_rate: Scalar | list[Scalar] | None = None, meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) @@ -716,13 +716,13 @@ def _create_bounds_for_load_factor(self): ) @property - def relative_flow_rate_bounds(self) -> tuple[TemporalData, 
TemporalData]: + def relative_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: if self.element.fixed_relative_profile is not None: return self.element.fixed_relative_profile, self.element.fixed_relative_profile return self.element.relative_minimum, self.element.relative_maximum @property - def absolute_flow_rate_bounds(self) -> tuple[TemporalData, TemporalData]: + def absolute_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: """ Returns the absolute bounds the flow_rate can reach. Further constraining might be needed @@ -765,7 +765,7 @@ def investment(self) -> InvestmentModel | None: return self.submodels['investment'] @property - def previous_states(self) -> TemporalData | None: + def previous_states(self) -> xr.DataArray | None: """Previous states of the flow rate""" # TODO: This would be nicer to handle in the Flow itself, and allow DataArrays as well. previous_flow_rate = self.element.previous_flow_rate From d772169dfb0fe4f3cb154e73089d02fa40b1484b Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 21:43:58 +0100 Subject: [PATCH 18/35] Use | instead of Union --- flixopt/types.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index 5566da117..6776ad415 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -100,9 +100,8 @@ def __getitem__(cls, dimensions): Same semantics as NumericData, but for boolean values. """ - # Return Union[] for better type checker compatibility (especially with | None) - # Using Union[] instead of | to avoid IDE warnings with "Type[...] 
| None" syntax - return Union[bool, np.bool_, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 + # Return using | operator for better IDE compatibility + return bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray class _EffectDataMeta(type): @@ -114,10 +113,9 @@ def __getitem__(cls, dimensions): Effect data is a dict with string keys mapping to numeric values """ - # Return Union[] for better type checker compatibility (especially with | None) - # Using Union[] instead of | to avoid IDE warnings with "Type[...] | None" syntax + # Return using | operator for better IDE compatibility # EffectData = dict[str, NumericData] - return dict[str, Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray]] # noqa: UP007 + return dict[str, int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray] class NumericData(metaclass=_NumericDataMeta): From 18d51623d333154e29f75c75080e2b38c0bda411 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Fri, 14 Nov 2025 21:44:10 +0100 Subject: [PATCH 19/35] Use | instead of Union --- flixopt/types.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index 6776ad415..43dd642e0 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -86,9 +86,8 @@ def __getitem__(cls, dimensions): # of which dimensions are specified. The dimension parameters serve # as documentation rather than runtime validation. - # Return Union[] for better type checker compatibility (especially with | None) - # Using Union[] instead of | to avoid IDE warnings with "Type[...] 
| None" syntax - return Union[int, float, np.integer, np.floating, np.ndarray, pd.Series, pd.DataFrame, xr.DataArray] # noqa: UP007 + # Return using | operator for better IDE compatibility + return int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray class _BoolDataMeta(type): @@ -328,7 +327,7 @@ def __init__(self): __all__ = [ 'NumericData', # Primary public type for numeric data 'BoolData', # Primary public type for boolean data - 'EffectData', # Primary public type for effect data (semantic variant of NumericData) + 'EffectData', # Primary public type for effect data 'Time', 'Period', 'Scenario', From 242eecd2fae5d97d23dfb664b302291bffcc94a2 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 09:00:27 +0100 Subject: [PATCH 20/35] Direct type hints --- flixopt/types.py | 336 +++++++---------------------------------------- 1 file changed, 44 insertions(+), 292 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index 43dd642e0..0a66436e3 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -1,33 +1,36 @@ """ Type system for dimension-aware data in flixopt. -This module provides generic types that clearly communicate which dimensions +This module provides type aliases that clearly communicate which dimensions data can have. The type system is designed to be self-documenting while maintaining maximum flexibility for input formats. 
Key Concepts ------------ -- Dimension markers (`Time`, `Period`, `Scenario`) represent the possible dimensions -- `NumericData[...]` generic type indicates the **maximum** dimensions data can have +- Type aliases use suffix notation to indicate dimensions: + - `_T`: Time dimension only + - `_TS`: Time and Scenario dimensions + - `_PS`: Period and Scenario dimensions (no time) + - `_TPS`: Time, Period, and Scenario dimensions - Data can have any subset of the specified dimensions (including being scalar) - All standard input formats are supported (scalar, array, Series, DataFrame, DataArray) Examples -------- -Type hint `NumericData[Time]` accepts: +Type hint `Numeric_T` accepts: - Scalar: `0.5` (broadcast to all timesteps) - 1D array: `np.array([1, 2, 3])` (matched to time dimension) - pandas Series: with DatetimeIndex matching flow system - xarray DataArray: with 'time' dimension -Type hint `NumericData[Time, Scenario]` accepts: +Type hint `Numeric_TS` accepts: - Scalar: `100` (broadcast to all time and scenario combinations) - 1D array: matched to time OR scenario dimension - 2D array: matched to both dimensions - pandas DataFrame: columns as scenarios, index as time - xarray DataArray: with any subset of 'time', 'scenario' dimensions -Type hint `NumericData[Period, Scenario]` (periodic data, no time): +Type hint `Numeric_PS` (periodic data, no time): - Used for investment parameters that vary by planning period - Accepts scalars, arrays matching periods/scenarios, or DataArrays @@ -36,300 +39,49 @@ - Not converted to DataArray, stays as scalar """ -from typing import Any, TypeAlias, Union +from typing import TypeAlias import numpy as np import pandas as pd import xarray as xr - -# Dimension marker classes for generic type subscripting -class Time: - """Marker for the time dimension in Data generic types.""" - - pass - - -class Period: - """Marker for the period dimension in Data generic types (for multi-period optimization).""" - - pass - - -class Scenario: - 
"""Marker for the scenario dimension in Data generic types (for scenario analysis).""" - - pass - - -class _NumericDataMeta(type): - """Metaclass for NumericData to enable subscript notation NumericData[Time, Scenario] for numeric data.""" - - def __getitem__(cls, dimensions): - """ - Create a type hint showing maximum dimensions for numeric data. - - The dimensions parameter can be: - - A single dimension: NumericData[Time] - - Multiple dimensions: NumericData[Time, Period, Scenario] - - The type hint communicates that data can have **at most** these dimensions. - Actual data can be: - - Scalar (broadcast to all dimensions) - - Have any subset of the specified dimensions - - Have all specified dimensions - - This is consistent with xarray's broadcasting semantics and the - framework's data conversion behavior. - """ - # For type checking purposes, we return the same union type regardless - # of which dimensions are specified. The dimension parameters serve - # as documentation rather than runtime validation. - - # Return using | operator for better IDE compatibility - return int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray - - -class _BoolDataMeta(type): - """Metaclass for BoolData to enable subscript notation BoolData[Time, Scenario] for boolean data.""" - - def __getitem__(cls, dimensions): - """ - Create a type hint showing maximum dimensions for boolean data. - - Same semantics as NumericData, but for boolean values. - """ - # Return using | operator for better IDE compatibility - return bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray - - -class _EffectDataMeta(type): - """Metaclass for EffectData to enable subscript notation EffectData[Time, Period, Scenario] for effect data.""" - - def __getitem__(cls, dimensions): - """ - Create a type hint showing maximum dimensions for effect data. 
- - Effect data is a dict with string keys mapping to numeric values - """ - # Return using | operator for better IDE compatibility - # EffectData = dict[str, NumericData] - return dict[str, int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray] - - -class NumericData(metaclass=_NumericDataMeta): - """ - Base type for numeric data that can have various dimensions. - - This is the internal base class. Use `NumericData` publicly for clarity. - - Use subscript notation to specify the maximum dimensions: - - `NumericData[Time]`: Time-varying numeric data (at most 'time' dimension) - - `NumericData[Time, Scenario]`: Time-varying with scenarios (at most 'time', 'scenario') - - `NumericData[Period, Scenario]`: Periodic data without time (at most 'period', 'scenario') - - `NumericData[Time, Period, Scenario]`: Full dimensionality (rarely used) - - Semantics: "At Most" Dimensions - -------------------------------- - When you see `NumericData[Time, Scenario]`, it means the data can have: - - No dimensions (scalar): broadcast to all time and scenario values - - Just 'time': broadcast across scenarios - - Just 'scenario': broadcast across time - - Both 'time' and 'scenario': full dimensionality - - Accepted Input Formats - ---------------------- - All dimension combinations accept these formats: - - Scalars: int, float (including numpy types) - - Arrays: numpy ndarray (matched by length/shape to dimensions) - - pandas Series: matched by index to dimension coordinates - - pandas DataFrame: typically columns=scenarios, index=time - - xarray DataArray: used directly with dimension validation - - Conversion Behavior - ------------------- - Input data is converted to xarray.DataArray internally: - - Scalars are broadcast to all specified dimensions - - Arrays are matched by length (unambiguous) or shape (multi-dimensional) - - Series are matched by index equality with coordinate values - - DataArrays are validated and broadcast as needed - - 
Note - ---- - This type is for **numeric** data only. For boolean data, use `BoolData`. - - This is the base class - use `NumericData` alias publicly for clarity and symmetry with `BoolData`. - - See Also - -------- - BoolData : For boolean data with dimensions - DataConverter.to_dataarray : The conversion implementation - FlowSystem.fit_to_model_coords : Fits data to the model's coordinate system - """ - - # This class is not meant to be instantiated, only used for type hints - def __init__(self): - raise TypeError('NumericData is a type hint only and cannot be instantiated') - - -class BoolData(metaclass=_BoolDataMeta): - """ - Generic type for boolean data that can have various dimensions. - - Use subscript notation to specify the maximum dimensions: - - `BoolData[Time]`: Time-varying boolean data - - `BoolData[Time, Scenario]`: Boolean data with time and scenario dimensions - - `BoolData[Period, Scenario]`: Periodic boolean data - - Semantics: "At Most" Dimensions - -------------------------------- - Same semantics as NumericData, but for boolean values. 
- When you see `BoolData[Time, Scenario]`, the data can have: - - No dimensions (scalar bool): broadcast to all time and scenario values - - Just 'time': broadcast across scenarios - - Just 'scenario': broadcast across time - - Both 'time' and 'scenario': full dimensionality - - Accepted Input Formats (Boolean) - --------------------------------- - All dimension combinations accept these formats: - - Scalars: bool, np.bool_ - - Arrays: numpy ndarray with boolean dtype (matched by length/shape to dimensions) - - pandas Series: with boolean values, matched by index to dimension coordinates - - pandas DataFrame: with boolean values - - xarray DataArray: with boolean values, used directly with dimension validation - - Use Cases - --------- - Boolean data is typically used for: - - Binary decision variables (on/off states) - - Constraint activation flags - - Feasibility indicators - - Conditional parameters - - Examples - -------- - >>> # Scalar boolean (broadcast to all dimensions) - >>> active: BoolData[Time] = True - >>> - >>> # Time-varying on/off pattern - >>> import numpy as np - >>> pattern: BoolData[Time] = np.array([True, False, True, False]) - >>> - >>> # Scenario-specific activation - >>> import pandas as pd - >>> scenario_active: BoolData[Scenario] = pd.Series([True, False, True], index=['low', 'mid', 'high']) - - Note - ---- - This type is for **boolean** data only. For numeric data, use `NumericData`. - - See Also - -------- - NumericData : For numeric data with dimensions - DataConverter.to_dataarray : The conversion implementation - """ - - # This class is not meant to be instantiated, only used for type hints - def __init__(self): - raise TypeError('BoolData is a type hint only and cannot be instantiated') - - -class EffectData(metaclass=_EffectDataMeta): - """ - Generic type for effect data that can be a single value or a dictionary of values. 
- - EffectData represents the common pattern for effects: either a single numeric contribution - or a dictionary with string keys mapping to numeric contributions. This is useful for - specifying effects where you either have a single effect or multiple named effects. - - Use subscript notation to specify the maximum dimensions: - - `EffectData[Time]`: Time-varying effect data - - `EffectData[Period, Scenario]`: Periodic effect data - - `EffectData[Time, Period, Scenario]`: Full dimensional effect data - - Type Structure - -------------- - `EffectData[dims]` = `NumericData[dims] | dict[str, NumericData[dims]]` - - This means you can provide: - - A single numeric value (scalar, array, Series, DataFrame, DataArray) - - A dict mapping effect names to numeric values - - Semantics: "At Most" Dimensions - -------------------------------- - When you see `EffectData[Time, Period, Scenario]`, it means the data can have: - - No dimensions (scalar): broadcast to all time, period, and scenario values - - Any subset: just time, just period, just scenario, time+period, etc. - - All dimensions: full 3D data - - Accepted Input Formats - ---------------------- - Single value: - - Scalars: int, float (including numpy types) - - Arrays: numpy ndarray with numeric dtype (matched by length/shape to dimensions) - - pandas Series: matched by index to dimension coordinates - - pandas DataFrame: typically columns=scenarios/periods, index=time - - xarray DataArray: used directly with dimension validation - - Dictionary of values: - - dict[str, ] - - Typical Use Cases - ----------------- - - Single effect: `EffectData[Time] = 10.5` or `np.array([10, 12, 11])` - - Multiple effects: `EffectData[Time] = {'CO2': 0.5, 'costs': 100}` - - Cross-effect relationships in Effect class - - Component effect contributions (effects_per_flow_hour, etc.) 
- - Examples - -------- - >>> # Single scalar effect (broadcast to all dimensions) - >>> single_cost: EffectData[Time, Period, Scenario] = 10.5 - >>> - >>> # Single time-varying effect - >>> single_emissions: EffectData[Time] = np.array([100, 120, 110]) - >>> - >>> # Multiple named effects (dict) - >>> multiple_effects: EffectData[Time] = { - ... 'CO2': np.array([0.5, 0.6, 0.5]), - ... 'costs': 100, # scalar broadcast to all time - ... } - >>> - >>> # Cross-effect in Effect class - >>> cost_effect = Effect( - ... label='total_costs', - ... unit='€', - ... is_objective=True, - ... share_from_temporal={'CO2': 0.1}, # EffectData[Time, Period, Scenario] - ... ) - - Note - ---- - EffectData = NumericData | dict[str, NumericData]. This pattern is specific to effects - and different from NumericData which only represents single numeric values. - - See Also - -------- - NumericData : General numeric data with dimensions - BoolData : For boolean data with dimensions - """ - - # This class is not meant to be instantiated, only used for type hints - def __init__(self): - raise TypeError('EffectData is a type hint only and cannot be instantiated') - - -# Simple scalar type for dimension-less numeric values +# Internal base types +_Numeric: TypeAlias = int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +_Bool: TypeAlias = bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +_Effect: TypeAlias = _Numeric | dict[str, _Numeric] + +# Numeric data with dimension combinations +Numeric_T: TypeAlias = _Numeric # Time +Numeric_TS: TypeAlias = _Numeric # Time, Scenario +Numeric_PS: TypeAlias = _Numeric # Period, Scenario +Numeric_TPS: TypeAlias = _Numeric # Time, Period, Scenario + +# Boolean data with dimension combinations +Bool_T: TypeAlias = _Bool +Bool_TS: TypeAlias = _Bool +Bool_PS: TypeAlias = _Bool + +# Effect data with dimension combinations +Effect_T: TypeAlias = _Effect +Effect_TS: TypeAlias = _Effect +Effect_PS: 
TypeAlias = _Effect +Effect_TPS: TypeAlias = _Effect + +# Scalar (no dimensions) Scalar: TypeAlias = int | float | np.integer | np.floating # Export public API __all__ = [ - 'NumericData', # Primary public type for numeric data - 'BoolData', # Primary public type for boolean data - 'EffectData', # Primary public type for effect data - 'Time', - 'Period', - 'Scenario', + 'Numeric_T', + 'Numeric_TS', + 'Numeric_PS', + 'Numeric_TPS', + 'Bool_T', + 'Bool_TS', + 'Bool_PS', + 'Effect_T', + 'Effect_TS', + 'Effect_PS', + 'Effect_TPS', 'Scalar', ] From d82c5d2665092b2727a31e05e27a611b040bc066 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:35:08 +0100 Subject: [PATCH 21/35] Use direct type hints instead of subscripts --- flixopt/__init__.py | 2 +- flixopt/components.py | 38 ++++++++++++------------- flixopt/core.py | 21 -------------- flixopt/effects.py | 44 ++++++++++++++--------------- flixopt/elements.py | 22 +++++++-------- flixopt/features.py | 4 +-- flixopt/flow_system.py | 8 +++--- flixopt/interface.py | 54 ++++++++++++++++++------------------ flixopt/linear_converters.py | 20 ++++++------- flixopt/modeling.py | 1 - flixopt/types.py | 32 ++++++++++----------- 11 files changed, 110 insertions(+), 136 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 52560bc85..29d6813ab 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -42,7 +42,7 @@ ) # Type system for dimension-aware type hints -from .types import BoolData, EffectData, NumericData, Period, Scalar, Scenario, Time +from .types import Numeric_TPS, Numeric_PS, Numeric_S, Bool_TPS, Bool_PS, Bool_S, Effect_TPS, Effect_PS, Effect_S, Scalar # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. 
diff --git a/flixopt/components.py b/flixopt/components.py index 632bb6ee9..b22b0caf5 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -12,7 +12,7 @@ import xarray as xr from . import io as fx_io -from .core import PeriodicDataUser, PlausibilityError, TemporalData, TemporalDataUser +from .core import PlausibilityError from .elements import Component, ComponentModel, Flow from .features import InvestmentModel, PiecewiseModel from .interface import InvestParameters, OnOffParameters, PiecewiseConversion @@ -387,17 +387,17 @@ def __init__( label: str, charging: Flow, discharging: Flow, - capacity_in_flow_hours: NumericData[Period, Scenario] | InvestParameters, - relative_minimum_charge_state: NumericData[Time, Period, Scenario] = 0, - relative_maximum_charge_state: NumericData[Time, Period, Scenario] = 1, - initial_charge_state: NumericData[Period, Scenario] | Literal['lastValueOfSim'] = 0, - minimal_final_charge_state: NumericData[Period, Scenario] | None = None, - maximal_final_charge_state: NumericData[Period, Scenario] | None = None, - relative_minimum_final_charge_state: NumericData[Period, Scenario] | None = None, - relative_maximum_final_charge_state: NumericData[Period, Scenario] | None = None, - eta_charge: NumericData[Time, Period, Scenario] = 1, - eta_discharge: NumericData[Time, Period, Scenario] = 1, - relative_loss_per_hour: NumericData[Time, Period, Scenario] = 0, + capacity_in_flow_hours: Numeric_PS | InvestParameters, + relative_minimum_charge_state: Numeric_TPS = 0, + relative_maximum_charge_state: Numeric_TPS = 1, + initial_charge_state: Numeric_PS | Literal['lastValueOfSim'] = 0, + minimal_final_charge_state: Numeric_PS | None = None, + maximal_final_charge_state: Numeric_PS | None = None, + relative_minimum_final_charge_state: Numeric_PS | None = None, + relative_maximum_final_charge_state: Numeric_PS | None = None, + eta_charge: Numeric_TPS = 1, + eta_discharge: Numeric_TPS = 1, + relative_loss_per_hour: Numeric_TPS = 0, 
prevent_simultaneous_charge_and_discharge: bool = True, balanced: bool = False, meta_data: dict | None = None, @@ -414,8 +414,8 @@ def __init__( self.charging = charging self.discharging = discharging self.capacity_in_flow_hours = capacity_in_flow_hours - self.relative_minimum_charge_state: NumericData[Time, Period, Scenario] = relative_minimum_charge_state - self.relative_maximum_charge_state: NumericData[Time, Period, Scenario] = relative_maximum_charge_state + self.relative_minimum_charge_state: Numeric_TPS = relative_minimum_charge_state + self.relative_maximum_charge_state: Numeric_TPS = relative_maximum_charge_state self.relative_minimum_final_charge_state = relative_minimum_final_charge_state self.relative_maximum_final_charge_state = relative_maximum_final_charge_state @@ -424,9 +424,9 @@ def __init__( self.minimal_final_charge_state = minimal_final_charge_state self.maximal_final_charge_state = maximal_final_charge_state - self.eta_charge: NumericData[Time, Period, Scenario] = eta_charge - self.eta_discharge: NumericData[Time, Period, Scenario] = eta_discharge - self.relative_loss_per_hour: NumericData[Time, Period, Scenario] = relative_loss_per_hour + self.eta_charge: Numeric_TPS = eta_charge + self.eta_discharge: Numeric_TPS = eta_discharge + self.relative_loss_per_hour: Numeric_TPS = relative_loss_per_hour self.prevent_simultaneous_charge_and_discharge = prevent_simultaneous_charge_and_discharge self.balanced = balanced @@ -664,8 +664,8 @@ def __init__( out1: Flow, in2: Flow | None = None, out2: Flow | None = None, - relative_losses: NumericData[Time, Period, Scenario] | None = None, - absolute_losses: NumericData[Time, Period, Scenario] | None = None, + relative_losses: Numeric_TPS | None = None, + absolute_losses: Numeric_TPS | None = None, on_off_parameters: OnOffParameters = None, prevent_simultaneous_flows_in_both_directions: bool = True, balanced: bool = False, diff --git a/flixopt/core.py b/flixopt/core.py index 406a6ea7c..c10248c6c 100644 --- 
a/flixopt/core.py +++ b/flixopt/core.py @@ -12,8 +12,6 @@ import pandas as pd import xarray as xr -from flixopt.types import NumericData, Period, Scenario, Time - logger = logging.getLogger('flixopt') FlowSystemDimensions = Literal['time', 'period', 'scenario'] @@ -143,25 +141,6 @@ def agg_weight(self): return self.aggregation_weight -TemporalDataUser = NumericData[Time, Scenario] -""" -User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension. - -Supports data with at most [Time, Scenario] dimensions. For periodic data (no time dimension), use PeriodicDataUser. -For data with all three dimensions [Time, Period, Scenario], use NumericData[Time, Period, Scenario] directly. -""" - -PeriodicDataUser = NumericData[Period, Scenario] -""" -User data for periodic parameters (no time dimension). Internally converted to an xr.DataArray. - -Supports data with at most [Period, Scenario] dimensions. For temporal data (with time), use TemporalDataUser. -""" - -TemporalData = xr.DataArray | TimeSeriesData -"""Internally used datatypes for temporal data (data with a time dimension).""" - - class DataConverter: """ Converts various data types into xarray.DataArray with specified target coordinates. diff --git a/flixopt/effects.py b/flixopt/effects.py index 120158b09..3aa0117e1 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -57,21 +57,21 @@ class Effect(Element): Maps periodic contributions from other effects to this effect. Type: `PeriodicEffectsUser` (single value or dict with dimensions [Period, Scenario]) minimum_temporal: Minimum allowed total contribution across all timesteps. - Type: `NumericData[Period, Scenario]` (sum over time, can vary by period/scenario) + Type: `Numeric_PS` (sum over time, can vary by period/scenario) maximum_temporal: Maximum allowed total contribution across all timesteps. 
- Type: `NumericData[Period, Scenario]` (sum over time, can vary by period/scenario) + Type: `Numeric_PS` (sum over time, can vary by period/scenario) minimum_per_hour: Minimum allowed contribution per hour. - Type: `NumericData[Time, Period, Scenario]` (per-timestep constraint, can vary by period) + Type: `Numeric_TPS` (per-timestep constraint, can vary by period) maximum_per_hour: Maximum allowed contribution per hour. - Type: `NumericData[Time, Period, Scenario]` (per-timestep constraint, can vary by period) + Type: `Numeric_TPS` (per-timestep constraint, can vary by period) minimum_periodic: Minimum allowed total periodic contribution. - Type: `NumericData[Period, Scenario]` (periodic constraint) + Type: `Numeric_PS` (periodic constraint) maximum_periodic: Maximum allowed total periodic contribution. - Type: `NumericData[Period, Scenario]` (periodic constraint) + Type: `Numeric_PS` (periodic constraint) minimum_total: Minimum allowed total effect (temporal + periodic combined). - Type: `NumericData[Period, Scenario]` (total constraint per period) + Type: `Numeric_PS` (total constraint per period) maximum_total: Maximum allowed total effect (temporal + periodic combined). - Type: `NumericData[Period, Scenario]` (total constraint per period) + Type: `Numeric_PS` (total constraint per period) meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. 
@@ -179,16 +179,16 @@ def __init__( meta_data: dict | None = None, is_standard: bool = False, is_objective: bool = False, - share_from_temporal: EffectData[Time, Period, Scenario] | None = None, - share_from_periodic: EffectData[Period, Scenario] | None = None, - minimum_temporal: NumericData[Period, Scenario] | None = None, - maximum_temporal: NumericData[Period, Scenario] | None = None, - minimum_periodic: NumericData[Period, Scenario] | None = None, - maximum_periodic: NumericData[Period, Scenario] | None = None, - minimum_per_hour: NumericData[Time, Period, Scenario] | None = None, - maximum_per_hour: NumericData[Time, Period, Scenario] | None = None, - minimum_total: NumericData[Period, Scenario] | None = None, - maximum_total: NumericData[Period, Scenario] | None = None, + share_from_temporal: Effect_TPS | None = None, + share_from_periodic: Effect_PS | None = None, + minimum_temporal: Numeric_PS | None = None, + maximum_temporal: Numeric_PS | None = None, + minimum_periodic: Numeric_PS | None = None, + maximum_periodic: Numeric_PS | None = None, + minimum_per_hour: Numeric_TPS | None = None, + maximum_per_hour: Numeric_TPS | None = None, + minimum_total: Numeric_PS | None = None, + maximum_total: Numeric_PS | None = None, **kwargs, ): super().__init__(label, meta_data=meta_data) @@ -196,10 +196,10 @@ def __init__( self.description = description self.is_standard = is_standard self.is_objective = is_objective - self.share_from_temporal: EffectData[Time, Period, Scenario] = ( + self.share_from_temporal: Effect_TPS = ( share_from_temporal if share_from_temporal is not None else {} ) - self.share_from_periodic: EffectData[Period, Scenario] = ( + self.share_from_periodic: Effect_PS = ( share_from_periodic if share_from_periodic is not None else {} ) @@ -491,8 +491,8 @@ def add_effects(self, *effects: Effect) -> None: logger.info(f'Registered new Effect: {effect.label}') def create_effect_values_dict( - self, effect_values_user: EffectData[Time, Period, Scenario] - 
) -> dict[str, NumericData[Time, Period, Scenario]] | None: + self, effect_values_user: Effect_TPS + ) -> dict[str, Numeric_TPS] | None: """Converts effect values into a dictionary. If a scalar is provided, it is associated with a default effect type. Examples: diff --git a/flixopt/elements.py b/flixopt/elements.py index c7ff91066..37e208435 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -23,7 +23,7 @@ import linopy from .flow_system import FlowSystem - from .types import EffectData, NumericData, Period, Scalar, Scenario, Time + from .types import Numeric_TPS, Numeric_PS, Numeric_S, Bool_TPS, Bool_PS, Bool_S, Effect_TPS, Effect_PS, Effect_S, Scalar logger = logging.getLogger('flixopt') @@ -228,7 +228,7 @@ class Bus(Element): def __init__( self, label: str, - excess_penalty_per_flow_hour: NumericData[Time, Period, Scenario] | None = 1e5, + excess_penalty_per_flow_hour: Numeric_TPS | None = 1e5, meta_data: dict | None = None, ): super().__init__(label, meta_data=meta_data) @@ -419,16 +419,16 @@ def __init__( self, label: str, bus: str, - size: NumericData[Period, Scenario] | InvestParameters = None, - fixed_relative_profile: NumericData[Time, Period, Scenario] | None = None, - relative_minimum: NumericData[Time, Period, Scenario] = 0, - relative_maximum: NumericData[Time, Period, Scenario] = 1, - effects_per_flow_hour: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None = None, + size: Numeric_PS | InvestParameters = None, + fixed_relative_profile: Numeric_TPS | None = None, + relative_minimum: Numeric_TPS = 0, + relative_maximum: Numeric_TPS = 1, + effects_per_flow_hour: Effect_TPS | Numeric_TPS | None = None, on_off_parameters: OnOffParameters | None = None, - flow_hours_total_max: NumericData[Period, Scenario] | None = None, - flow_hours_total_min: NumericData[Period, Scenario] | None = None, - load_factor_min: NumericData[Period, Scenario] | None = None, - load_factor_max: NumericData[Period, Scenario] | None = None, + 
flow_hours_total_max: Numeric_PS | None = None, + flow_hours_total_min: Numeric_PS | None = None, + load_factor_min: Numeric_PS | None = None, + load_factor_max: Numeric_PS | None = None, previous_flow_rate: Scalar | list[Scalar] | None = None, meta_data: dict | None = None, ): diff --git a/flixopt/features.py b/flixopt/features.py index e6d400556..172b3dc24 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -518,8 +518,8 @@ def __init__( dims: list[FlowSystemDimensions], label_of_element: str | None = None, label_of_model: str | None = None, - total_max: NumericData[Period, Scenario] | None = None, - total_min: NumericData[Period, Scenario] | None = None, + total_max: Numeric_PS | None = None, + total_min: Numeric_PS | None = None, max_per_hour: TemporalData | None = None, min_per_hour: TemporalData | None = None, ): diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index aa1cec586..0af1007f1 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -159,7 +159,7 @@ def __init__( scenarios: pd.Index | None = None, hours_of_last_timestep: int | float | None = None, hours_of_previous_timesteps: int | float | np.ndarray | None = None, - weights: NumericData[Period, Scenario] | None = None, + weights: Numeric_PS | None = None, scenario_independent_sizes: bool | list[str] = True, scenario_independent_flow_rates: bool | list[str] = False, ): @@ -523,7 +523,7 @@ def to_json(self, path: str | pathlib.Path): def fit_to_model_coords( self, name: str, - data: NumericData[Time, Period, Scenario] | BoolData[Time, Period, Scenario] | None, + data: Numeric_TPS | Bool_TPS | None, dims: Collection[FlowSystemDimensions] | None = None, ) -> xr.DataArray | None: """ @@ -563,11 +563,11 @@ def fit_to_model_coords( def fit_effects_to_model_coords( self, label_prefix: str | None, - effect_values: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None, + effect_values: Effect_TPS | Numeric_TPS | None, label_suffix: str | None = 
None, dims: Collection[FlowSystemDimensions] | None = None, delimiter: str = '|', - ) -> EffectData[Time, Period, Scenario] | None: + ) -> Effect_TPS | None: """ Transform EffectValues from the user to Internal Datatypes aligned with model coordinates. """ diff --git a/flixopt/interface.py b/flixopt/interface.py index e986bb078..715f894f2 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -72,7 +72,7 @@ class Piece(Interface): """ - def __init__(self, start: NumericData[Time, Period, Scenario], end: NumericData[Time, Period, Scenario]): + def __init__(self, start: Numeric_TPS, end: Numeric_TPS): self.start = start self.end = end self.has_time_dim = False @@ -873,15 +873,15 @@ class InvestParameters(Interface): def __init__( self, - fixed_size: NumericData[Period, Scenario] | None = None, - minimum_size: NumericData[Period, Scenario] | None = None, - maximum_size: NumericData[Period, Scenario] | None = None, + fixed_size: Numeric_PS | None = None, + minimum_size: Numeric_PS | None = None, + maximum_size: Numeric_PS | None = None, mandatory: bool = False, - effects_of_investment: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, - effects_of_investment_per_size: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, - effects_of_retirement: EffectData[Period, Scenario] | NumericData[Period, Scenario] | None = None, + effects_of_investment: Effect_PS | Numeric_PS | None = None, + effects_of_investment_per_size: Effect_PS | Numeric_PS | None = None, + effects_of_retirement: Effect_PS | Numeric_PS | None = None, piecewise_effects_of_investment: PiecewiseEffects | None = None, - linked_periods: NumericData[Period, Scenario] | tuple[int, int] | None = None, + linked_periods: Numeric_PS | tuple[int, int] | None = None, **kwargs, ): # Handle deprecated parameters using centralized helper @@ -999,7 +999,7 @@ def optional(self, value: bool): self.mandatory = not value @property - def fix_effects(self) -> 
EffectData[Period, Scenario] | NumericData[Period, Scenario]: + def fix_effects(self) -> Effect_PS | Numeric_PS: """Deprecated property. Use effects_of_investment instead.""" warnings.warn( 'The fix_effects property is deprecated. Use effects_of_investment instead.', @@ -1009,7 +1009,7 @@ def fix_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scen return self.effects_of_investment @property - def specific_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scenario]: + def specific_effects(self) -> Effect_PS | Numeric_PS: """Deprecated property. Use effects_of_investment_per_size instead.""" warnings.warn( 'The specific_effects property is deprecated. Use effects_of_investment_per_size instead.', @@ -1019,7 +1019,7 @@ def specific_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, return self.effects_of_investment_per_size @property - def divest_effects(self) -> EffectData[Period, Scenario] | NumericData[Period, Scenario]: + def divest_effects(self) -> Effect_PS | Numeric_PS: """Deprecated property. Use effects_of_retirement instead.""" warnings.warn( 'The divest_effects property is deprecated. 
Use effects_of_retirement instead.', @@ -1039,11 +1039,11 @@ def piecewise_effects(self) -> PiecewiseEffects | None: return self.piecewise_effects_of_investment @property - def minimum_or_fixed_size(self) -> NumericData[Period, Scenario]: + def minimum_or_fixed_size(self) -> Numeric_PS: return self.fixed_size if self.fixed_size is not None else self.minimum_size @property - def maximum_or_fixed_size(self) -> NumericData[Period, Scenario]: + def maximum_or_fixed_size(self) -> Numeric_PS: return self.fixed_size if self.fixed_size is not None else self.maximum_size def format_for_repr(self) -> str: @@ -1263,28 +1263,28 @@ class OnOffParameters(Interface): def __init__( self, - effects_per_switch_on: EffectData[Time, Period, Scenario] | NumericData[Time, Period, Scenario] | None = None, - effects_per_running_hour: EffectData[Time, Period, Scenario] - | NumericData[Time, Period, Scenario] + effects_per_switch_on: Effect_TPS | Numeric_TPS | None = None, + effects_per_running_hour: Effect_TPS + | Numeric_TPS | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, - consecutive_on_hours_min: NumericData[Time, Period, Scenario] | None = None, - consecutive_on_hours_max: NumericData[Time, Period, Scenario] | None = None, - consecutive_off_hours_min: NumericData[Time, Period, Scenario] | None = None, - consecutive_off_hours_max: NumericData[Time, Period, Scenario] | None = None, + consecutive_on_hours_min: Numeric_TPS | None = None, + consecutive_on_hours_max: Numeric_TPS | None = None, + consecutive_off_hours_min: Numeric_TPS | None = None, + consecutive_off_hours_max: Numeric_TPS | None = None, switch_on_total_max: int | None = None, force_switch_on: bool = False, ): self.effects_per_switch_on = effects_per_switch_on if effects_per_switch_on is not None else {} self.effects_per_running_hour = effects_per_running_hour if effects_per_running_hour is not None else {} - self.on_hours_total_min: NumericData[Period, Scenario] = 
on_hours_total_min - self.on_hours_total_max: NumericData[Period, Scenario] = on_hours_total_max - self.consecutive_on_hours_min: NumericData[Time, Period, Scenario] = consecutive_on_hours_min - self.consecutive_on_hours_max: NumericData[Time, Period, Scenario] = consecutive_on_hours_max - self.consecutive_off_hours_min: NumericData[Time, Period, Scenario] = consecutive_off_hours_min - self.consecutive_off_hours_max: NumericData[Time, Period, Scenario] = consecutive_off_hours_max - self.switch_on_total_max: NumericData[Period, Scenario] = switch_on_total_max + self.on_hours_total_min: Numeric_PS = on_hours_total_min + self.on_hours_total_max: Numeric_PS = on_hours_total_max + self.consecutive_on_hours_min: Numeric_TPS = consecutive_on_hours_min + self.consecutive_on_hours_max: Numeric_TPS = consecutive_on_hours_max + self.consecutive_off_hours_min: Numeric_TPS = consecutive_off_hours_min + self.consecutive_off_hours_max: Numeric_TPS = consecutive_off_hours_max + self.switch_on_total_max: Numeric_PS = switch_on_total_max self.force_switch_on: bool = force_switch_on def transform_data(self, flow_system: FlowSystem, name_prefix: str = '') -> None: diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 661cdc030..5a9e46f94 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -77,7 +77,7 @@ class Boiler(LinearConverter): def __init__( self, label: str, - eta: NumericData[Time, Period, Scenario], + eta: Numeric_TPS, Q_fu: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -164,7 +164,7 @@ class Power2Heat(LinearConverter): def __init__( self, label: str, - eta: NumericData[Time, Period, Scenario], + eta: Numeric_TPS, P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -251,7 +251,7 @@ class HeatPump(LinearConverter): def __init__( self, label: str, - COP: NumericData[Time, Period, Scenario], + COP: Numeric_TPS, P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None 
= None, @@ -340,7 +340,7 @@ class CoolingTower(LinearConverter): def __init__( self, label: str, - specific_electricity_demand: NumericData[Time, Period, Scenario], + specific_electricity_demand: Numeric_TPS, P_el: Flow, Q_th: Flow, on_off_parameters: OnOffParameters | None = None, @@ -438,8 +438,8 @@ class CHP(LinearConverter): def __init__( self, label: str, - eta_th: NumericData[Time, Period, Scenario], - eta_el: NumericData[Time, Period, Scenario], + eta_th: Numeric_TPS, + eta_el: Numeric_TPS, Q_fu: Flow, P_el: Flow, Q_th: Flow, @@ -552,7 +552,7 @@ class HeatPumpWithSource(LinearConverter): def __init__( self, label: str, - COP: NumericData[Time, Period, Scenario], + COP: Numeric_TPS, P_el: Flow, Q_ab: Flow, Q_th: Flow, @@ -590,11 +590,11 @@ def COP(self, value): # noqa: N802 def check_bounds( - value: NumericData[Time, Period, Scenario], + value: Numeric_TPS, parameter_label: str, element_label: str, - lower_bound: NumericData[Time, Period, Scenario], - upper_bound: NumericData[Time, Period, Scenario], + lower_bound: Numeric_TPS, + upper_bound: Numeric_TPS, ) -> None: """ Check if the value is within the bounds. The bounds are exclusive. 
diff --git a/flixopt/modeling.py b/flixopt/modeling.py index c7f0bf314..b2676db48 100644 --- a/flixopt/modeling.py +++ b/flixopt/modeling.py @@ -5,7 +5,6 @@ import xarray as xr from .config import CONFIG -from .core import TemporalData from .structure import Submodel logger = logging.getLogger('flixopt') diff --git a/flixopt/types.py b/flixopt/types.py index 0a66436e3..1411f3e30 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -51,37 +51,33 @@ _Effect: TypeAlias = _Numeric | dict[str, _Numeric] # Numeric data with dimension combinations -Numeric_T: TypeAlias = _Numeric # Time -Numeric_TS: TypeAlias = _Numeric # Time, Scenario -Numeric_PS: TypeAlias = _Numeric # Period, Scenario Numeric_TPS: TypeAlias = _Numeric # Time, Period, Scenario +Numeric_PS: TypeAlias = _Numeric # Period, Scenario +Numeric_S: TypeAlias = _Numeric # Scenario # Boolean data with dimension combinations -Bool_T: TypeAlias = _Bool -Bool_TS: TypeAlias = _Bool -Bool_PS: TypeAlias = _Bool +Bool_TPS: TypeAlias = _Bool # Time, Period, Scenario +Bool_PS: TypeAlias = _Bool # Period, Scenario +Bool_S: TypeAlias = _Bool # Scenario # Effect data with dimension combinations -Effect_T: TypeAlias = _Effect -Effect_TS: TypeAlias = _Effect -Effect_PS: TypeAlias = _Effect -Effect_TPS: TypeAlias = _Effect +Effect_TPS: TypeAlias = _Effect # Time, Period, Scenario +Effect_PS: TypeAlias = _Effect # Period, Scenario +Effect_S: TypeAlias = _Effect # Scenario # Scalar (no dimensions) Scalar: TypeAlias = int | float | np.integer | np.floating # Export public API __all__ = [ - 'Numeric_T', - 'Numeric_TS', - 'Numeric_PS', 'Numeric_TPS', - 'Bool_T', - 'Bool_TS', + 'Numeric_PS', + 'Numeric_S', + 'Bool_TPS', 'Bool_PS', - 'Effect_T', - 'Effect_TS', - 'Effect_PS', + 'Bool_S', 'Effect_TPS', + 'Effect_PS', + 'Effect_S', 'Scalar', ] From d4f4df03b07a9e4c5c57180e5c9d0056f28b13ff Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:37:53 +0100 Subject: [PATCH 22/35] 
Update type hints --- flixopt/modeling.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/flixopt/modeling.py b/flixopt/modeling.py index b2676db48..13b4c0e3e 100644 --- a/flixopt/modeling.py +++ b/flixopt/modeling.py @@ -118,7 +118,7 @@ def count_consecutive_states( class ModelingUtilities: @staticmethod def compute_consecutive_hours_in_state( - binary_values: TemporalData, + binary_values: xr.DataArray, hours_per_timestep: int | float, epsilon: float = None, ) -> float: @@ -202,7 +202,7 @@ def expression_tracking_variable( tracked_expression, name: str = None, short_name: str = None, - bounds: tuple[TemporalData, TemporalData] = None, + bounds: tuple[xr.DataArray, xr.DataArray] = None, coords: str | list[str] | None = None, ) -> tuple[linopy.Variable, linopy.Constraint]: """ @@ -241,11 +241,11 @@ def consecutive_duration_tracking( state_variable: linopy.Variable, name: str = None, short_name: str = None, - minimum_duration: TemporalData | None = None, - maximum_duration: TemporalData | None = None, + minimum_duration: xr.DataArray | None = None, + maximum_duration: xr.DataArray | None = None, duration_dim: str = 'time', - duration_per_step: int | float | TemporalData = None, - previous_duration: TemporalData = 0, + duration_per_step: int | float | xr.DataArray = None, + previous_duration: xr.DataArray = 0, ) -> tuple[linopy.Variable, tuple[linopy.Constraint, linopy.Constraint, linopy.Constraint]]: """ Creates consecutive duration tracking for a binary state variable. @@ -393,7 +393,7 @@ class BoundingPatterns: def basic_bounds( model: Submodel, variable: linopy.Variable, - bounds: tuple[TemporalData, TemporalData], + bounds: tuple[xr.DataArray, xr.DataArray], name: str = None, ) -> list[linopy.constraints.Constraint]: """Create simple bounds. 
@@ -425,7 +425,7 @@ def basic_bounds( def bounds_with_state( model: Submodel, variable: linopy.Variable, - bounds: tuple[TemporalData, TemporalData], + bounds: tuple[xr.DataArray, xr.DataArray], variable_state: linopy.Variable, name: str = None, ) -> list[linopy.Constraint]: @@ -472,7 +472,7 @@ def scaled_bounds( model: Submodel, variable: linopy.Variable, scaling_variable: linopy.Variable, - relative_bounds: tuple[TemporalData, TemporalData], + relative_bounds: tuple[xr.DataArray, xr.DataArray], name: str = None, ) -> list[linopy.Constraint]: """Constraint a variable by scaling bounds, dependent on another variable. @@ -515,8 +515,8 @@ def scaled_bounds_with_state( model: Submodel, variable: linopy.Variable, scaling_variable: linopy.Variable, - relative_bounds: tuple[TemporalData, TemporalData], - scaling_bounds: tuple[TemporalData, TemporalData], + relative_bounds: tuple[xr.DataArray, xr.DataArray], + scaling_bounds: tuple[xr.DataArray, xr.DataArray], variable_state: linopy.Variable, name: str = None, ) -> list[linopy.Constraint]: From 540f1f8c7fdd344435c943dc7419dcf01b2faa24 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:47:51 +0100 Subject: [PATCH 23/35] Update type hints --- flixopt/__init__.py | 13 ++++++++++++- flixopt/components.py | 6 +++--- flixopt/effects.py | 14 ++++---------- flixopt/elements.py | 13 ++++++++++++- flixopt/features.py | 8 ++++---- flixopt/flow_system.py | 2 +- flixopt/interface.py | 6 ++---- flixopt/linear_converters.py | 2 +- 8 files changed, 39 insertions(+), 25 deletions(-) diff --git a/flixopt/__init__.py b/flixopt/__init__.py index 29d6813ab..b40855905 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -42,7 +42,18 @@ ) # Type system for dimension-aware type hints -from .types import Numeric_TPS, Numeric_PS, Numeric_S, Bool_TPS, Bool_PS, Bool_S, Effect_TPS, Effect_PS, Effect_S, Scalar +from .types import ( + Bool_PS, + Bool_S, + Bool_TPS, + Effect_PS, + 
Effect_S, + Effect_TPS, + Numeric_PS, + Numeric_S, + Numeric_TPS, + Scalar, +) # === Runtime warning suppression for third-party libraries === # These warnings are from dependencies and cannot be fixed by end users. diff --git a/flixopt/components.py b/flixopt/components.py index b22b0caf5..6a5abfc4e 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -23,7 +23,7 @@ import linopy from .flow_system import FlowSystem - from .types import NumericData, Period, Scenario, Time + from .types import Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') @@ -170,7 +170,7 @@ def __init__( inputs: list[Flow], outputs: list[Flow], on_off_parameters: OnOffParameters | None = None, - conversion_factors: list[dict[str, NumericData[Time, Scenario]]] | None = None, + conversion_factors: list[dict[str, Numeric_TPS]] | None = None, piecewise_conversion: PiecewiseConversion | None = None, meta_data: dict | None = None, ): @@ -917,7 +917,7 @@ def _initial_and_final_charge_state(self): ) @property - def _absolute_charge_state_bounds(self) -> tuple[TemporalData, TemporalData]: + def _absolute_charge_state_bounds(self) -> tuple[xr.DataArray, xr.DataArray]: relative_lower_bound, relative_upper_bound = self._relative_charge_state_bounds if not isinstance(self.element.capacity_in_flow_hours, InvestParameters): return ( diff --git a/flixopt/effects.py b/flixopt/effects.py index 3aa0117e1..fc77a3169 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -23,7 +23,7 @@ from collections.abc import Iterator from .flow_system import FlowSystem - from .types import EffectData, NumericData, Period, Scalar, Scenario, Time + from .types import Effect_PS, Effect_TPS, Numeric_PS, Numeric_TPS, Scalar logger = logging.getLogger('flixopt') @@ -196,12 +196,8 @@ def __init__( self.description = description self.is_standard = is_standard self.is_objective = is_objective - self.share_from_temporal: Effect_TPS = ( - share_from_temporal if share_from_temporal is not None else {} - ) - 
self.share_from_periodic: Effect_PS = ( - share_from_periodic if share_from_periodic is not None else {} - ) + self.share_from_temporal: Effect_TPS = share_from_temporal if share_from_temporal is not None else {} + self.share_from_periodic: Effect_PS = share_from_periodic if share_from_periodic is not None else {} # Handle backwards compatibility for deprecated parameters using centralized helper minimum_temporal = self._handle_deprecated_kwarg( @@ -490,9 +486,7 @@ def add_effects(self, *effects: Effect) -> None: self.add(effect) # Use the inherited add() method from ElementContainer logger.info(f'Registered new Effect: {effect.label}') - def create_effect_values_dict( - self, effect_values_user: Effect_TPS - ) -> dict[str, Numeric_TPS] | None: + def create_effect_values_dict(self, effect_values_user: Numeric_TPS | Effect_TPS | None) -> Effect_TPS | None: """Converts effect values into a dictionary. If a scalar is provided, it is associated with a default effect type. Examples: diff --git a/flixopt/elements.py b/flixopt/elements.py index 37e208435..224cc0f9c 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -23,7 +23,18 @@ import linopy from .flow_system import FlowSystem - from .types import Numeric_TPS, Numeric_PS, Numeric_S, Bool_TPS, Bool_PS, Bool_S, Effect_TPS, Effect_PS, Effect_S, Scalar + from .types import ( + Bool_PS, + Bool_S, + Bool_TPS, + Effect_PS, + Effect_S, + Effect_TPS, + Numeric_PS, + Numeric_S, + Numeric_TPS, + Scalar, + ) logger = logging.getLogger('flixopt') diff --git a/flixopt/features.py b/flixopt/features.py index 172b3dc24..e42b148e1 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -17,7 +17,7 @@ if TYPE_CHECKING: from .core import FlowSystemDimensions, Scalar, TemporalData from .interface import InvestParameters, OnOffParameters, Piecewise - from .types import NumericData, Period, Scenario + from .types import Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') @@ -154,7 +154,7 @@ def __init__( 
label_of_element: str, parameters: OnOffParameters, on_variable: linopy.Variable, - previous_states: TemporalData | None, + previous_states: Numeric_TPS | None, label_of_model: str | None = None, ): """ @@ -520,8 +520,8 @@ def __init__( label_of_model: str | None = None, total_max: Numeric_PS | None = None, total_min: Numeric_PS | None = None, - max_per_hour: TemporalData | None = None, - min_per_hour: TemporalData | None = None, + max_per_hour: Numeric_TPS | None = None, + min_per_hour: Numeric_TPS | None = None, ): if 'time' not in dims and (max_per_hour is not None or min_per_hour is not None): raise ValueError('Both max_per_hour and min_per_hour cannot be used when has_time_dim is False') diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 0af1007f1..72da046fd 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -32,7 +32,7 @@ import pyvis - from .types import BoolData, EffectData, NumericData, Period, Scenario, Time + from .types import Bool_TPS, Effect_TPS, Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') diff --git a/flixopt/interface.py b/flixopt/interface.py index 715f894f2..23f9b72ab 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -20,7 +20,7 @@ from collections.abc import Iterator from .flow_system import FlowSystem - from .types import EffectData, NumericData, Period, Scenario, Time + from .types import Effect_PS, Effect_TPS, Numeric_PS, Numeric_TPS logger = logging.getLogger('flixopt') @@ -1264,9 +1264,7 @@ class OnOffParameters(Interface): def __init__( self, effects_per_switch_on: Effect_TPS | Numeric_TPS | None = None, - effects_per_running_hour: Effect_TPS - | Numeric_TPS - | None = None, + effects_per_running_hour: Effect_TPS | Numeric_TPS | None = None, on_hours_total_min: int | None = None, on_hours_total_max: int | None = None, consecutive_on_hours_min: Numeric_TPS | None = None, diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 5a9e46f94..d59c68f09 100644 
--- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -16,7 +16,7 @@ if TYPE_CHECKING: from .elements import Flow from .interface import OnOffParameters - from .types import NumericData, Period, Scenario, Time + from .types import Numeric_TPS logger = logging.getLogger('flixopt') From cfceef61a4bbff83a6e83fc481a27a3ec1d04f11 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:54:39 +0100 Subject: [PATCH 24/35] Update type documentation --- flixopt/types.py | 250 +++++++++++++++++++++++++++++++++++++---------- 1 file changed, 197 insertions(+), 53 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index 1411f3e30..a9ba8a63b 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -1,42 +1,56 @@ -""" -Type system for dimension-aware data in flixopt. - -This module provides type aliases that clearly communicate which dimensions -data can have. The type system is designed to be self-documenting while -maintaining maximum flexibility for input formats. 
- -Key Concepts ------------- -- Type aliases use suffix notation to indicate dimensions: - - `_T`: Time dimension only - - `_TS`: Time and Scenario dimensions - - `_PS`: Period and Scenario dimensions (no time) - - `_TPS`: Time, Period, and Scenario dimensions -- Data can have any subset of the specified dimensions (including being scalar) -- All standard input formats are supported (scalar, array, Series, DataFrame, DataArray) - -Examples --------- -Type hint `Numeric_T` accepts: - - Scalar: `0.5` (broadcast to all timesteps) - - 1D array: `np.array([1, 2, 3])` (matched to time dimension) - - pandas Series: with DatetimeIndex matching flow system - - xarray DataArray: with 'time' dimension - -Type hint `Numeric_TS` accepts: - - Scalar: `100` (broadcast to all time and scenario combinations) - - 1D array: matched to time OR scenario dimension - - 2D array: matched to both dimensions - - pandas DataFrame: columns as scenarios, index as time - - xarray DataArray: with any subset of 'time', 'scenario' dimensions - -Type hint `Numeric_PS` (periodic data, no time): - - Used for investment parameters that vary by planning period - - Accepts scalars, arrays matching periods/scenarios, or DataArrays - -Type hint `Scalar`: - - Only numeric scalars (int, float) - - Not converted to DataArray, stays as scalar +"""Type system for dimension-aware data in flixopt. + +This module provides type aliases that clearly communicate which dimensions data can +have, making function signatures self-documenting while maintaining maximum flexibility +for input formats. + +The type system uses suffix notation to indicate maximum dimensions: + - ``_TPS``: Time, Period, and Scenario dimensions + - ``_PS``: Period and Scenario dimensions (no time) + - ``_S``: Scenario dimension only + - No suffix: Scalar values only + +All dimensioned types accept any subset of their specified dimensions, including scalars +which are automatically broadcast to all dimensions. 
+ +Supported Input Formats: + - Scalars: ``int``, ``float`` (including numpy types) + - Arrays: ``numpy.ndarray`` (matched by length/shape to dimensions) + - Series: ``pandas.Series`` (matched by index to dimension coordinates) + - DataFrames: ``pandas.DataFrame`` (typically columns=scenarios, index=time) + - DataArrays: ``xarray.DataArray`` (used directly with dimension validation) + +Example: + Basic usage with different dimension combinations:: + ```python + from flixopt.types import Numeric_TPS, Numeric_PS, Scalar + + def create_flow( + label: str, + size: Numeric_PS = None, # Can be scalar, array, Series, etc. + profile: Numeric_TPS = 1.0, # Accepts time-varying data + efficiency: Scalar = 0.95, # Only scalars + ): + ... + + # All of these are valid: + create_flow("heat", size=100) # Scalar broadcast + create_flow("heat", size=np.array([100, 150])) # Period-varying + create_flow("heat", profile=pd.DataFrame(...)) # Time + scenario + ``` + +Note: + Data can have **any subset** of the specified dimensions. 
For example, + ``Numeric_TPS`` can accept: + - Scalar: ``0.5`` (broadcast to all time, period, scenario combinations) + - 1D array: matched to one dimension + - 2D array: matched to two dimensions + - 3D array: matched to all three dimensions + - ``xarray.DataArray``: with any subset of 'time', 'period', 'scenario' dims + +See Also: + DataConverter.to_dataarray: Implementation of data conversion logic + FlowSystem.fit_to_model_coords: Fits data to model coordinate system """ from typing import TypeAlias @@ -45,28 +59,158 @@ import pandas as pd import xarray as xr -# Internal base types +# Internal base types - not exported _Numeric: TypeAlias = int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +"""Base numeric type union accepting scalars, arrays, Series, DataFrames, and DataArrays.""" + _Bool: TypeAlias = bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +"""Base boolean type union accepting bool scalars, arrays, Series, DataFrames, and DataArrays.""" + _Effect: TypeAlias = _Numeric | dict[str, _Numeric] +"""Base effect type union accepting numeric data or dict of numeric data for named effects.""" + + +# Numeric data type aliases with dimension combinations +Numeric_TPS: TypeAlias = _Numeric +"""Numeric data with at most Time, Period, and Scenario dimensions. + +Use this for data that can vary across time steps, planning periods, and scenarios. +Accepts any subset of these dimensions including scalars (broadcast to all dimensions). + +Example: + :: -# Numeric data with dimension combinations -Numeric_TPS: TypeAlias = _Numeric # Time, Period, Scenario -Numeric_PS: TypeAlias = _Numeric # Period, Scenario -Numeric_S: TypeAlias = _Numeric # Scenario + efficiency: Numeric_TPS = 0.95 # Scalar broadcast + efficiency: Numeric_TPS = np.array([...]) # Time-varying + efficiency: Numeric_TPS = pd.DataFrame(...) 
# Time + scenarios +""" + +Numeric_PS: TypeAlias = _Numeric +"""Numeric data with at most Period and Scenario dimensions (no time variation). + +Use this for investment parameters that vary by planning period and scenario but not +within each period (e.g., investment costs, capacity sizes). + +Example: + :: + + size: Numeric_PS = 100 # Scalar + size: Numeric_PS = np.array([100, 150, 200]) # Period-varying + size: Numeric_PS = pd.DataFrame(...) # Period + scenario combinations +""" + +Numeric_S: TypeAlias = _Numeric +"""Numeric data with at most Scenario dimension. + +Use this for scenario-specific parameters that don't vary over time or periods. -# Boolean data with dimension combinations -Bool_TPS: TypeAlias = _Bool # Time, Period, Scenario -Bool_PS: TypeAlias = _Bool # Period, Scenario -Bool_S: TypeAlias = _Bool # Scenario +Example: + :: + + discount_rate: Numeric_S = 0.05 # Same for all scenarios + discount_rate: Numeric_S = pd.Series([0.03, 0.05, 0.07]) # Scenario-varying +""" + + +# Boolean data type aliases with dimension combinations +Bool_TPS: TypeAlias = _Bool +"""Boolean data with at most Time, Period, and Scenario dimensions. + +Use this for binary flags or activation states that can vary across time, periods, +and scenarios (e.g., on/off constraints, feasibility indicators). + +Example: + :: + + is_active: Bool_TPS = True # Always active + is_active: Bool_TPS = np.array([True, False, True, ...]) # Time-varying +""" -# Effect data with dimension combinations -Effect_TPS: TypeAlias = _Effect # Time, Period, Scenario -Effect_PS: TypeAlias = _Effect # Period, Scenario -Effect_S: TypeAlias = _Effect # Scenario +Bool_PS: TypeAlias = _Bool +"""Boolean data with at most Period and Scenario dimensions. -# Scalar (no dimensions) +Use this for binary investment decisions or constraints that vary by period and +scenario but not within each period. 
+ +Example: + :: + + can_invest: Bool_PS = True # Can invest in all periods + can_invest: Bool_PS = np.array([False, True, True]) # Period-specific +""" + +Bool_S: TypeAlias = _Bool +"""Boolean data with at most Scenario dimension. + +Use this for scenario-specific binary flags. + +Example: + :: + + high_demand: Bool_S = False # Same for all scenarios + high_demand: Bool_S = pd.Series([False, True, True]) # Scenario-varying +""" + + +# Effect data type aliases with dimension combinations +Effect_TPS: TypeAlias = _Effect +"""Effect data with at most Time, Period, and Scenario dimensions. + +Effects represent costs, emissions, or other impacts. Can be a single numeric value +or a dict mapping effect names to numeric values for multiple named effects. + +Example: + :: + + # Single effect + cost: Effect_TPS = 10.5 + cost: Effect_TPS = np.array([10, 12, 11, ...]) + + # Multiple named effects + effects: Effect_TPS = { + 'CO2': 0.5, + 'costs': np.array([100, 120, 110, ...]), + } +""" + +Effect_PS: TypeAlias = _Effect +"""Effect data with at most Period and Scenario dimensions. + +Use this for period-specific effects like investment costs or periodic emissions. + +Example: + :: + + investment_cost: Effect_PS = 1000 # Fixed cost + investment_cost: Effect_PS = {'capex': 1000, 'opex': 50} # Multiple effects +""" + +Effect_S: TypeAlias = _Effect +"""Effect data with at most Scenario dimension. + +Use this for scenario-specific effects. + +Example: + :: + + carbon_price: Effect_S = 50 # Same for all scenarios + carbon_price: Effect_S = pd.Series([30, 50, 70]) # Scenario-varying +""" + + +# Scalar type (no dimensions) Scalar: TypeAlias = int | float | np.integer | np.floating +"""Scalar numeric values only (no arrays or DataArrays). + +Use this when you specifically want to accept only scalar values, not arrays. +Unlike dimensioned types, scalars are not converted to DataArrays internally. 
+ +Example: + :: + + efficiency: Scalar = 0.95 # OK + efficiency: Scalar = np.array([0.95]) # Type error - array not allowed +""" # Export public API __all__ = [ From d0fac14daf36a1133285d21069924f63ce679bc4 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:57:40 +0100 Subject: [PATCH 25/35] Update type documentation --- flixopt/types.py | 215 ++++++++++------------------------------------- 1 file changed, 45 insertions(+), 170 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index a9ba8a63b..cc4069a2e 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -1,56 +1,38 @@ """Type system for dimension-aware data in flixopt. -This module provides type aliases that clearly communicate which dimensions data can -have, making function signatures self-documenting while maintaining maximum flexibility -for input formats. - -The type system uses suffix notation to indicate maximum dimensions: - - ``_TPS``: Time, Period, and Scenario dimensions - - ``_PS``: Period and Scenario dimensions (no time) - - ``_S``: Scenario dimension only - - No suffix: Scalar values only - -All dimensioned types accept any subset of their specified dimensions, including scalars -which are automatically broadcast to all dimensions. - -Supported Input Formats: - - Scalars: ``int``, ``float`` (including numpy types) - - Arrays: ``numpy.ndarray`` (matched by length/shape to dimensions) - - Series: ``pandas.Series`` (matched by index to dimension coordinates) - - DataFrames: ``pandas.DataFrame`` (typically columns=scenarios, index=time) - - DataArrays: ``xarray.DataArray`` (used directly with dimension validation) +Type aliases use suffix notation to indicate maximum dimensions. Data can have any +subset of these dimensions (including scalars, which are broadcast to all dimensions). 
+ +| Suffix | Dimensions | Use Case | +|--------|------------|----------| +| `_TPS` | Time, Period, Scenario | Time-varying data across all dimensions | +| `_PS` | Period, Scenario | Investment parameters (no time variation) | +| `_S` | Scenario | Scenario-specific parameters | +| (none) | Scalar only | Single numeric values | + +All dimensioned types accept: scalars (`int`, `float`), arrays (`ndarray`), +Series (`pd.Series`), DataFrames (`pd.DataFrame`), or DataArrays (`xr.DataArray`). Example: - Basic usage with different dimension combinations:: - ```python - from flixopt.types import Numeric_TPS, Numeric_PS, Scalar - - def create_flow( - label: str, - size: Numeric_PS = None, # Can be scalar, array, Series, etc. - profile: Numeric_TPS = 1.0, # Accepts time-varying data - efficiency: Scalar = 0.95, # Only scalars - ): - ... - - # All of these are valid: - create_flow("heat", size=100) # Scalar broadcast - create_flow("heat", size=np.array([100, 150])) # Period-varying - create_flow("heat", profile=pd.DataFrame(...)) # Time + scenario - ``` + ```python + from flixopt.types import Numeric_TPS, Numeric_PS, Scalar + + def create_flow( + size: Numeric_PS = None, # Scalar, array, Series, DataFrame, or DataArray + profile: Numeric_TPS = 1.0, # Time-varying data + efficiency: Scalar = 0.95, # Scalars only + ): + ... + + # All valid: + create_flow(size=100) # Scalar broadcast + create_flow(size=np.array([100, 150])) # Period-varying + create_flow(profile=pd.DataFrame(...)) # Time + scenario + ``` Note: - Data can have **any subset** of the specified dimensions. 
For example, - ``Numeric_TPS`` can accept: - - Scalar: ``0.5`` (broadcast to all time, period, scenario combinations) - - 1D array: matched to one dimension - - 2D array: matched to two dimensions - - 3D array: matched to all three dimensions - - ``xarray.DataArray``: with any subset of 'time', 'period', 'scenario' dims - -See Also: - DataConverter.to_dataarray: Implementation of data conversion logic - FlowSystem.fit_to_model_coords: Fits data to model coordinate system + Data can have **any subset** of specified dimensions. `Numeric_TPS` accepts scalars, + 1D/2D/3D arrays, or DataArrays with any subset of 'time', 'period', 'scenario' dims. """ from typing import TypeAlias @@ -61,156 +43,49 @@ def create_flow( # Internal base types - not exported _Numeric: TypeAlias = int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray -"""Base numeric type union accepting scalars, arrays, Series, DataFrames, and DataArrays.""" - _Bool: TypeAlias = bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray -"""Base boolean type union accepting bool scalars, arrays, Series, DataFrames, and DataArrays.""" - _Effect: TypeAlias = _Numeric | dict[str, _Numeric] -"""Base effect type union accepting numeric data or dict of numeric data for named effects.""" -# Numeric data type aliases with dimension combinations +# Numeric data types Numeric_TPS: TypeAlias = _Numeric -"""Numeric data with at most Time, Period, and Scenario dimensions. - -Use this for data that can vary across time steps, planning periods, and scenarios. -Accepts any subset of these dimensions including scalars (broadcast to all dimensions). - -Example: - :: - - efficiency: Numeric_TPS = 0.95 # Scalar broadcast - efficiency: Numeric_TPS = np.array([...]) # Time-varying - efficiency: Numeric_TPS = pd.DataFrame(...) # Time + scenarios -""" +"""Time, Period, Scenario dimensions. 
For time-varying data across all dimensions.""" Numeric_PS: TypeAlias = _Numeric -"""Numeric data with at most Period and Scenario dimensions (no time variation). - -Use this for investment parameters that vary by planning period and scenario but not -within each period (e.g., investment costs, capacity sizes). - -Example: - :: - - size: Numeric_PS = 100 # Scalar - size: Numeric_PS = np.array([100, 150, 200]) # Period-varying - size: Numeric_PS = pd.DataFrame(...) # Period + scenario combinations -""" +"""Period, Scenario dimensions. For investment parameters (e.g., size, costs).""" Numeric_S: TypeAlias = _Numeric -"""Numeric data with at most Scenario dimension. +"""Scenario dimension. For scenario-specific parameters (e.g., discount rates).""" -Use this for scenario-specific parameters that don't vary over time or periods. -Example: - :: - - discount_rate: Numeric_S = 0.05 # Same for all scenarios - discount_rate: Numeric_S = pd.Series([0.03, 0.05, 0.07]) # Scenario-varying -""" - - -# Boolean data type aliases with dimension combinations +# Boolean data types Bool_TPS: TypeAlias = _Bool -"""Boolean data with at most Time, Period, and Scenario dimensions. - -Use this for binary flags or activation states that can vary across time, periods, -and scenarios (e.g., on/off constraints, feasibility indicators). - -Example: - :: - - is_active: Bool_TPS = True # Always active - is_active: Bool_TPS = np.array([True, False, True, ...]) # Time-varying -""" +"""Time, Period, Scenario dimensions. For time-varying binary flags/constraints.""" Bool_PS: TypeAlias = _Bool -"""Boolean data with at most Period and Scenario dimensions. - -Use this for binary investment decisions or constraints that vary by period and -scenario but not within each period. - -Example: - :: - - can_invest: Bool_PS = True # Can invest in all periods - can_invest: Bool_PS = np.array([False, True, True]) # Period-specific -""" +"""Period, Scenario dimensions. 
For period-specific binary decisions.""" Bool_S: TypeAlias = _Bool -"""Boolean data with at most Scenario dimension. - -Use this for scenario-specific binary flags. +"""Scenario dimension. For scenario-specific binary flags.""" -Example: - :: - high_demand: Bool_S = False # Same for all scenarios - high_demand: Bool_S = pd.Series([False, True, True]) # Scenario-varying -""" - - -# Effect data type aliases with dimension combinations +# Effect data types Effect_TPS: TypeAlias = _Effect -"""Effect data with at most Time, Period, and Scenario dimensions. - -Effects represent costs, emissions, or other impacts. Can be a single numeric value -or a dict mapping effect names to numeric values for multiple named effects. - -Example: - :: - - # Single effect - cost: Effect_TPS = 10.5 - cost: Effect_TPS = np.array([10, 12, 11, ...]) - - # Multiple named effects - effects: Effect_TPS = { - 'CO2': 0.5, - 'costs': np.array([100, 120, 110, ...]), - } -""" +"""Time, Period, Scenario dimensions. For time-varying effects (costs, emissions). +Can be single numeric value or dict mapping effect names to values.""" Effect_PS: TypeAlias = _Effect -"""Effect data with at most Period and Scenario dimensions. - -Use this for period-specific effects like investment costs or periodic emissions. - -Example: - :: - - investment_cost: Effect_PS = 1000 # Fixed cost - investment_cost: Effect_PS = {'capex': 1000, 'opex': 50} # Multiple effects -""" +"""Period, Scenario dimensions. For period-specific effects (investment costs). +Can be single numeric value or dict mapping effect names to values.""" Effect_S: TypeAlias = _Effect -"""Effect data with at most Scenario dimension. - -Use this for scenario-specific effects. - -Example: - :: - - carbon_price: Effect_S = 50 # Same for all scenarios - carbon_price: Effect_S = pd.Series([30, 50, 70]) # Scenario-varying -""" +"""Scenario dimension. For scenario-specific effects (carbon prices). 
+Can be single numeric value or dict mapping effect names to values.""" # Scalar type (no dimensions) Scalar: TypeAlias = int | float | np.integer | np.floating -"""Scalar numeric values only (no arrays or DataArrays). - -Use this when you specifically want to accept only scalar values, not arrays. -Unlike dimensioned types, scalars are not converted to DataArrays internally. - -Example: - :: - - efficiency: Scalar = 0.95 # OK - efficiency: Scalar = np.array([0.95]) # Type error - array not allowed -""" +"""Scalar numeric values only. Not converted to DataArray (unlike dimensioned types).""" # Export public API __all__ = [ From 1187a2ca827bd176f9d895e4f8f1be01a0f8a085 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 10:59:32 +0100 Subject: [PATCH 26/35] Update type documentation --- flixopt/types.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/flixopt/types.py b/flixopt/types.py index cc4069a2e..0a8ef66d9 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -30,9 +30,11 @@ def create_flow( create_flow(profile=pd.DataFrame(...)) # Time + scenario ``` -Note: - Data can have **any subset** of specified dimensions. `Numeric_TPS` accepts scalars, - 1D/2D/3D arrays, or DataArrays with any subset of 'time', 'period', 'scenario' dims. +Important: + Data can have **any subset** of specified dimensions, but **cannot have more + dimensions than the FlowSystem**. If the FlowSystem has only time dimension, + you cannot pass period or scenario data. The type hints indicate the maximum + dimensions that could be used if they exist in the FlowSystem. 
""" from typing import TypeAlias From 8abf6fb175c1c41eef14cc402980bd349617b784 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:12:11 +0100 Subject: [PATCH 27/35] Add another datatype --- flixopt/core.py | 13 +++---------- flixopt/features.py | 2 +- flixopt/io.py | 4 +++- flixopt/types.py | 5 +++++ 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/flixopt/core.py b/flixopt/core.py index c10248c6c..0d70e255b 100644 --- a/flixopt/core.py +++ b/flixopt/core.py @@ -12,6 +12,8 @@ import pandas as pd import xarray as xr +from .types import NumericOrBool + logger = logging.getLogger('flixopt') FlowSystemDimensions = Literal['time', 'period', 'scenario'] @@ -387,16 +389,7 @@ def _broadcast_dataarray_to_target_specification( @classmethod def to_dataarray( cls, - data: int - | float - | bool - | np.integer - | np.floating - | np.bool_ - | np.ndarray - | pd.Series - | pd.DataFrame - | xr.DataArray, + data: NumericOrBool, coords: dict[str, pd.Index] | None = None, ) -> xr.DataArray: """ diff --git a/flixopt/features.py b/flixopt/features.py index e42b148e1..b00ccc547 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -15,7 +15,7 @@ from .structure import FlowSystemModel, Submodel if TYPE_CHECKING: - from .core import FlowSystemDimensions, Scalar, TemporalData + from .core import FlowSystemDimensions from .interface import InvestParameters, OnOffParameters, Piecewise from .types import Numeric_PS, Numeric_TPS diff --git a/flixopt/io.py b/flixopt/io.py index 3c53c4170..e83738d89 100644 --- a/flixopt/io.py +++ b/flixopt/io.py @@ -19,6 +19,8 @@ if TYPE_CHECKING: import linopy + from .types import Numeric_TPS + logger = logging.getLogger('flixopt') @@ -651,7 +653,7 @@ def update(self, new_name: str | None = None, new_folder: pathlib.Path | None = def numeric_to_str_for_repr( - value: int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray, + value: 
Numeric_TPS, precision: int = 1, atol: float = 1e-10, ) -> str: diff --git a/flixopt/types.py b/flixopt/types.py index 0a8ef66d9..b5c92a5fe 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -48,6 +48,10 @@ def create_flow( _Bool: TypeAlias = bool | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray _Effect: TypeAlias = _Numeric | dict[str, _Numeric] +# Combined type for numeric or boolean data (no dimension information) +NumericOrBool: TypeAlias = int | float | bool | np.integer | np.floating | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +"""Numeric or boolean data without dimension metadata. For internal utilities.""" + # Numeric data types Numeric_TPS: TypeAlias = _Numeric @@ -101,4 +105,5 @@ def create_flow( 'Effect_PS', 'Effect_S', 'Scalar', + 'NumericOrBool', ] From 73dc336747721d7d4d86ebe207494073340ef83b Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:12:20 +0100 Subject: [PATCH 28/35] Fix typehints --- flixopt/interface.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flixopt/interface.py b/flixopt/interface.py index 23f9b72ab..bc7adabbc 100644 --- a/flixopt/interface.py +++ b/flixopt/interface.py @@ -1265,13 +1265,13 @@ def __init__( self, effects_per_switch_on: Effect_TPS | Numeric_TPS | None = None, effects_per_running_hour: Effect_TPS | Numeric_TPS | None = None, - on_hours_total_min: int | None = None, - on_hours_total_max: int | None = None, + on_hours_total_min: Numeric_PS | None = None, + on_hours_total_max: Numeric_PS | None = None, consecutive_on_hours_min: Numeric_TPS | None = None, consecutive_on_hours_max: Numeric_TPS | None = None, consecutive_off_hours_min: Numeric_TPS | None = None, consecutive_off_hours_max: Numeric_TPS | None = None, - switch_on_total_max: int | None = None, + switch_on_total_max: Numeric_PS | None = None, force_switch_on: bool = False, ): self.effects_per_switch_on = effects_per_switch_on if 
effects_per_switch_on is not None else {} From c5afcbd824b8fa9b4f28977759f52f87f6a445ca Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:14:22 +0100 Subject: [PATCH 29/35] Fix typehints --- flixopt/effects.py | 4 ++-- flixopt/flow_system.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flixopt/effects.py b/flixopt/effects.py index fc77a3169..d428cccd2 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -52,10 +52,10 @@ class Effect(Element): Only one effect can be marked as objective per optimization. share_from_temporal: Temporal cross-effect contributions. Maps temporal contributions from other effects to this effect. - Type: `TemporalEffectsUser` (single value or dict with dimensions [Time, Period, Scenario]) + Type: `Effect_TPS` (single value or dict with dimensions [Time, Period, Scenario]) share_from_periodic: Periodic cross-effect contributions. Maps periodic contributions from other effects to this effect. - Type: `PeriodicEffectsUser` (single value or dict with dimensions [Period, Scenario]) + Type: `Effect_PS` (single value or dict with dimensions [Period, Scenario]) minimum_temporal: Minimum allowed total contribution across all timesteps. Type: `Numeric_PS` (sum over time, can vary by period/scenario) maximum_temporal: Maximum allowed total contribution across all timesteps. 
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 72da046fd..081359076 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -32,7 +32,7 @@ import pyvis - from .types import Bool_TPS, Effect_TPS, Numeric_PS, Numeric_TPS + from .types import Bool_TPS, Effect_TPS, Numeric_PS, Numeric_TPS, NumericOrBool logger = logging.getLogger('flixopt') @@ -523,7 +523,7 @@ def to_json(self, path: str | pathlib.Path): def fit_to_model_coords( self, name: str, - data: Numeric_TPS | Bool_TPS | None, + data: NumericOrBool | None, dims: Collection[FlowSystemDimensions] | None = None, ) -> xr.DataArray | None: """ @@ -531,7 +531,7 @@ def fit_to_model_coords( Args: name: Name of the data - data: Data to fit to model coordinates + data: Data to fit to model coordinates (accepts any dimensionality including scalars) dims: Collection of dimension names to use for fitting. If None, all dimensions are used. Returns: From 559a9fe486d6e64d8850629ffc76e83a04f47705 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:15:01 +0100 Subject: [PATCH 30/35] Fix validation in linear_converter classes --- flixopt/linear_converters.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index d59c68f09..364af1b1d 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -87,12 +87,12 @@ def __init__( label, inputs=[Q_fu], outputs=[Q_th], - conversion_factors=[{Q_fu.label: eta, Q_th.label: 1}], on_off_parameters=on_off_parameters, meta_data=meta_data, ) self.Q_fu = Q_fu self.Q_th = Q_th + self.eta = eta #Uses setter @property def eta(self): @@ -101,7 +101,7 @@ def eta(self): @eta.setter def eta(self, value): check_bounds(value, 'eta', self.label_full, 0, 1) - self.conversion_factors[0][self.Q_fu.label] = value + self.conversion_factors = [{self.Q_fu.label: value, self.Q_th.label: 1}] @register_class_for_io @@ 
-563,16 +563,14 @@ def __init__( label, inputs=[P_el, Q_ab], outputs=[Q_th], - conversion_factors=[{P_el.label: COP, Q_th.label: 1}, {Q_ab.label: COP / (COP - 1), Q_th.label: 1}], + conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) self.P_el = P_el self.Q_ab = Q_ab self.Q_th = Q_th - - if np.any(np.asarray(self.COP) <= 1): - raise ValueError(f'{self.label_full}.COP must be strictly > 1 for HeatPumpWithSource.') + self.COP = COP # Uses setter @property def COP(self): # noqa: N802 From 94a0ca4ff283ebf73897eaa33e261e0b255fbbe7 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:23:36 +0100 Subject: [PATCH 31/35] Fix validation in linear_converter classes --- flixopt/linear_converters.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 364af1b1d..040675b76 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -174,13 +174,13 @@ def __init__( label, inputs=[P_el], outputs=[Q_th], - conversion_factors=[{P_el.label: eta, Q_th.label: 1}], on_off_parameters=on_off_parameters, meta_data=meta_data, ) self.P_el = P_el self.Q_th = Q_th + self.eta = eta # Uses setter @property def eta(self): @@ -189,7 +189,7 @@ def eta(self): @eta.setter def eta(self, value): check_bounds(value, 'eta', self.label_full, 0, 1) - self.conversion_factors[0][self.P_el.label] = value + self.conversion_factors = [{self.P_el.label: value, self.Q_th.label: 1}] @register_class_for_io @@ -261,13 +261,13 @@ def __init__( label, inputs=[P_el], outputs=[Q_th], - conversion_factors=[{P_el.label: COP, Q_th.label: 1}], + conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) self.P_el = P_el self.Q_th = Q_th - self.COP = COP + self.COP = COP # Uses setter @property def COP(self): # noqa: N802 @@ -276,7 +276,7 @@ def COP(self): # noqa: N802 @COP.setter 
def COP(self, value): # noqa: N802 check_bounds(value, 'COP', self.label_full, 1, 20) - self.conversion_factors[0][self.P_el.label] = value + self.conversion_factors = [{self.P_el.label: value, self.Q_th.label: 1}] @register_class_for_io @@ -350,15 +350,13 @@ def __init__( label, inputs=[P_el, Q_th], outputs=[], - conversion_factors=[{P_el.label: -1, Q_th.label: specific_electricity_demand}], on_off_parameters=on_off_parameters, meta_data=meta_data, ) self.P_el = P_el self.Q_th = Q_th - - check_bounds(specific_electricity_demand, 'specific_electricity_demand', self.label_full, 0, 1) + self.specific_electricity_demand = specific_electricity_demand # Uses setter @property def specific_electricity_demand(self): @@ -367,7 +365,7 @@ def specific_electricity_demand(self): @specific_electricity_demand.setter def specific_electricity_demand(self, value): check_bounds(value, 'specific_electricity_demand', self.label_full, 0, 1) - self.conversion_factors[0][self.Q_th.label] = value + self.conversion_factors = [{self.P_el.label: -1, self.Q_th.label: value}] @register_class_for_io @@ -446,14 +444,11 @@ def __init__( on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, ): - heat = {Q_fu.label: eta_th, Q_th.label: 1} - electricity = {Q_fu.label: eta_el, P_el.label: 1} - super().__init__( label, inputs=[Q_fu], outputs=[Q_th, P_el], - conversion_factors=[heat, electricity], + conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) @@ -461,6 +456,8 @@ def __init__( self.Q_fu = Q_fu self.P_el = P_el self.Q_th = Q_th + self.eta_th = eta_th # Uses setter + self.eta_el = eta_el # Uses setter check_bounds(eta_el + eta_th, 'eta_th+eta_el', self.label_full, 0, 1) @@ -471,7 +468,11 @@ def eta_th(self): @eta_th.setter def eta_th(self, value): check_bounds(value, 'eta_th', self.label_full, 0, 1) - self.conversion_factors[0][self.Q_fu.label] = value + if len(self.conversion_factors) < 2: + # Initialize structure if not yet set + 
self.conversion_factors = [{self.Q_fu.label: value, self.Q_th.label: 1}, {}] + else: + self.conversion_factors[0] = {self.Q_fu.label: value, self.Q_th.label: 1} @property def eta_el(self): @@ -480,7 +481,11 @@ def eta_el(self): @eta_el.setter def eta_el(self, value): check_bounds(value, 'eta_el', self.label_full, 0, 1) - self.conversion_factors[1][self.Q_fu.label] = value + if len(self.conversion_factors) < 2: + # Initialize structure if not yet set + self.conversion_factors = [{}, {self.Q_fu.label: value, self.P_el.label: 1}] + else: + self.conversion_factors[1] = {self.Q_fu.label: value, self.P_el.label: 1} @register_class_for_io From ef1a2eea2cc9554d7a8c84709e830d76def37725 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:26:54 +0100 Subject: [PATCH 32/35] pre commit run --- flixopt/linear_converters.py | 2 +- flixopt/types.py | 21 ++++++++++++--------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 040675b76..44fe11795 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -92,7 +92,7 @@ def __init__( ) self.Q_fu = Q_fu self.Q_th = Q_th - self.eta = eta #Uses setter + self.eta = eta # Uses setter @property def eta(self): diff --git a/flixopt/types.py b/flixopt/types.py index b5c92a5fe..f53d308c4 100644 --- a/flixopt/types.py +++ b/flixopt/types.py @@ -17,17 +17,18 @@ ```python from flixopt.types import Numeric_TPS, Numeric_PS, Scalar + def create_flow( - size: Numeric_PS = None, # Scalar, array, Series, DataFrame, or DataArray - profile: Numeric_TPS = 1.0, # Time-varying data - efficiency: Scalar = 0.95, # Scalars only - ): - ... + size: Numeric_PS = None, # Scalar, array, Series, DataFrame, or DataArray + profile: Numeric_TPS = 1.0, # Time-varying data + efficiency: Scalar = 0.95, # Scalars only + ): ... 
+ # All valid: - create_flow(size=100) # Scalar broadcast - create_flow(size=np.array([100, 150])) # Period-varying - create_flow(profile=pd.DataFrame(...)) # Time + scenario + create_flow(size=100) # Scalar broadcast + create_flow(size=np.array([100, 150])) # Period-varying + create_flow(profile=pd.DataFrame(...)) # Time + scenario ``` Important: @@ -49,7 +50,9 @@ def create_flow( _Effect: TypeAlias = _Numeric | dict[str, _Numeric] # Combined type for numeric or boolean data (no dimension information) -NumericOrBool: TypeAlias = int | float | bool | np.integer | np.floating | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +NumericOrBool: TypeAlias = ( + int | float | bool | np.integer | np.floating | np.bool_ | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray +) """Numeric or boolean data without dimension metadata. For internal utilities.""" From 08ce29db230b72bca69448f3bc7afb43e2d33590 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:35:12 +0100 Subject: [PATCH 33/35] Step 1 --- flixopt/linear_converters.py | 228 +++++++++++++++++++++-------------- 1 file changed, 137 insertions(+), 91 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 44fe11795..3c731f19d 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -34,11 +34,13 @@ class Boiler(LinearConverter): label: The label of the Element. Used to identify it in the FlowSystem. eta: Thermal efficiency factor (0-1 range). Defines the ratio of thermal output to fuel input energy content. - Q_fu: Fuel input-flow representing fuel consumption. - Q_th: Thermal output-flow representing heat generation. + fuel_flow: Fuel input-flow representing fuel consumption. + thermal_flow: Thermal output-flow representing heat generation. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. 
Not used internally but saved in results. Only use Python native types. + Q_fu: *Deprecated*. Use `fuel_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. Examples: Natural gas boiler: @@ -47,8 +49,8 @@ class Boiler(LinearConverter): gas_boiler = Boiler( label='natural_gas_boiler', eta=0.85, # 85% thermal efficiency - Q_fu=natural_gas_flow, - Q_th=hot_water_flow, + fuel_flow=natural_gas_flow, + thermal_flow=hot_water_flow, ) ``` @@ -58,8 +60,8 @@ class Boiler(LinearConverter): biomass_boiler = Boiler( label='wood_chip_boiler', eta=seasonal_efficiency_profile, # Time-varying efficiency - Q_fu=biomass_flow, - Q_th=district_heat_flow, + fuel_flow=biomass_flow, + thermal_flow=district_heat_flow, on_off_parameters=OnOffParameters( consecutive_on_hours_min=4, # Minimum 4-hour operation effects_per_switch_on={'startup_fuel': 50}, # Startup fuel penalty @@ -68,7 +70,7 @@ class Boiler(LinearConverter): ``` Note: - The conversion relationship is: Q_th = Q_fu × eta + The conversion relationship is: thermal_flow = fuel_flow × eta Efficiency should be between 0 and 1, where 1 represents perfect conversion (100% of fuel energy converted to useful thermal output). 
@@ -78,20 +80,25 @@ def __init__( self, label: str, eta: Numeric_TPS, - Q_fu: Flow, - Q_th: Flow, + fuel_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[Q_fu], - outputs=[Q_th], + inputs=[fuel_flow], + outputs=[thermal_flow], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.Q_fu = Q_fu - self.Q_th = Q_th + self.Q_fu = fuel_flow + self.Q_th = thermal_flow self.eta = eta # Uses setter @property @@ -119,11 +126,13 @@ class Power2Heat(LinearConverter): eta: Thermal efficiency factor (0-1 range). For resistance heating this is typically close to 1.0 (nearly 100% efficiency), but may be lower for electrode boilers or systems with distribution losses. - P_el: Electrical input-flow representing electricity consumption. - Q_th: Thermal output-flow representing heat generation. + power_flow: Electrical input-flow representing electricity consumption. + thermal_flow: Thermal output-flow representing heat generation. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + P_el: *Deprecated*. Use `power_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Electric resistance heater: @@ -132,8 +141,8 @@ class Power2Heat(LinearConverter): electric_heater = Power2Heat( label='resistance_heater', eta=0.98, # 98% efficiency (small losses) - P_el=electricity_flow, - Q_th=space_heating_flow, + power_flow=electricity_flow, + thermal_flow=space_heating_flow, ) ``` @@ -143,8 +152,8 @@ class Power2Heat(LinearConverter): electrode_boiler = Power2Heat( label='electrode_steam_boiler', eta=0.95, # 95% efficiency including boiler losses - P_el=industrial_electricity, - Q_th=process_steam_flow, + power_flow=industrial_electricity, + thermal_flow=process_steam_flow, on_off_parameters=OnOffParameters( consecutive_on_hours_min=1, # Minimum 1-hour operation effects_per_switch_on={'startup_cost': 100}, @@ -153,7 +162,7 @@ class Power2Heat(LinearConverter): ``` Note: - The conversion relationship is: Q_th = P_el × eta + The conversion relationship is: thermal_flow = power_flow × eta Unlike heat pumps, Power2Heat systems cannot exceed 100% efficiency (eta ≤ 1.0) as they only convert electrical energy without extracting additional energy @@ -165,21 +174,26 @@ def __init__( self, label: str, eta: Numeric_TPS, - P_el: Flow, - Q_th: Flow, + power_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[P_el], - outputs=[Q_th], + inputs=[power_flow], + outputs=[thermal_flow], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = P_el - self.Q_th = Q_th + self.P_el = power_flow + self.Q_th = thermal_flow self.eta = eta # Uses setter @property @@ -207,11 +221,13 @@ class HeatPump(LinearConverter): COP: Coefficient of Performance (typically 1-20 range). 
Defines the ratio of thermal output to electrical input. COP > 1 indicates the heat pump extracts additional energy from the environment. - P_el: Electrical input-flow representing electricity consumption. - Q_th: Thermal output-flow representing heat generation. + power_flow: Electrical input-flow representing electricity consumption. + thermal_flow: Thermal output-flow representing heat generation. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + P_el: *Deprecated*. Use `power_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. Examples: Air-source heat pump with constant COP: @@ -220,8 +236,8 @@ class HeatPump(LinearConverter): air_hp = HeatPump( label='air_source_heat_pump', COP=3.5, # COP of 3.5 (350% efficiency) - P_el=electricity_flow, - Q_th=heating_flow, + power_flow=electricity_flow, + thermal_flow=heating_flow, ) ``` @@ -231,8 +247,8 @@ class HeatPump(LinearConverter): ground_hp = HeatPump( label='geothermal_heat_pump', COP=temperature_dependent_cop, # Time-varying COP based on ground temp - P_el=electricity_flow, - Q_th=radiant_heating_flow, + power_flow=electricity_flow, + thermal_flow=radiant_heating_flow, on_off_parameters=OnOffParameters( consecutive_on_hours_min=2, # Avoid frequent cycling effects_per_running_hour={'maintenance': 0.5}, @@ -241,7 +257,7 @@ class HeatPump(LinearConverter): ``` Note: - The conversion relationship is: Q_th = P_el × COP + The conversion relationship is: thermal_flow = power_flow × COP COP should be greater than 1 for realistic heat pump operation, with typical values ranging from 2-6 depending on technology and operating conditions. 
@@ -252,21 +268,26 @@ def __init__( self, label: str, COP: Numeric_TPS, - P_el: Flow, - Q_th: Flow, + power_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[P_el], - outputs=[Q_th], + inputs=[power_flow], + outputs=[thermal_flow], conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = P_el - self.Q_th = Q_th + self.P_el = power_flow + self.Q_th = thermal_flow self.COP = COP # Uses setter @property @@ -294,11 +315,13 @@ class CoolingTower(LinearConverter): specific_electricity_demand: Auxiliary electricity demand per unit of cooling power (dimensionless, typically 0.01-0.05 range). Represents the fraction of thermal power that must be supplied as electricity for fans and pumps. - P_el: Electrical input-flow representing electricity consumption for fans/pumps. - Q_th: Thermal input-flow representing waste heat to be rejected to environment. + power_flow: Electrical input-flow representing electricity consumption for fans/pumps. + thermal_flow: Thermal input-flow representing waste heat to be rejected to environment. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + P_el: *Deprecated*. Use `power_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Industrial cooling tower: @@ -307,8 +330,8 @@ class CoolingTower(LinearConverter): cooling_tower = CoolingTower( label='process_cooling_tower', specific_electricity_demand=0.025, # 2.5% auxiliary power - P_el=cooling_electricity, - Q_th=waste_heat_flow, + power_flow=cooling_electricity, + thermal_flow=waste_heat_flow, ) ``` @@ -318,8 +341,8 @@ class CoolingTower(LinearConverter): condenser_cooling = CoolingTower( label='power_plant_cooling', specific_electricity_demand=0.015, # 1.5% auxiliary power - P_el=auxiliary_electricity, - Q_th=condenser_waste_heat, + power_flow=auxiliary_electricity, + thermal_flow=condenser_waste_heat, on_off_parameters=OnOffParameters( consecutive_on_hours_min=4, # Minimum operation time effects_per_running_hour={'water_consumption': 2.5}, # m³/h @@ -328,7 +351,7 @@ class CoolingTower(LinearConverter): ``` Note: - The conversion relationship is: P_el = Q_th × specific_electricity_demand + The conversion relationship is: power_flow = thermal_flow × specific_electricity_demand The cooling tower consumes electrical power proportional to the thermal load. No thermal energy is produced - all thermal input is rejected to the environment. 
@@ -341,21 +364,26 @@ def __init__( self, label: str, specific_electricity_demand: Numeric_TPS, - P_el: Flow, - Q_th: Flow, + power_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[P_el, Q_th], + inputs=[power_flow, thermal_flow], outputs=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = P_el - self.Q_th = Q_th + self.P_el = power_flow + self.Q_th = thermal_flow self.specific_electricity_demand = specific_electricity_demand # Uses setter @property @@ -384,12 +412,15 @@ class CHP(LinearConverter): energy converted to useful thermal output. eta_el: Electrical efficiency factor (0-1 range). Defines the fraction of fuel energy converted to electrical output. - Q_fu: Fuel input-flow representing fuel consumption. - P_el: Electrical output-flow representing electricity generation. - Q_th: Thermal output-flow representing heat generation. + fuel_flow: Fuel input-flow representing fuel consumption. + power_flow: Electrical output-flow representing electricity generation. + thermal_flow: Thermal output-flow representing heat generation. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + Q_fu: *Deprecated*. Use `fuel_flow` instead. + P_el: *Deprecated*. Use `power_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. 
Examples: Natural gas CHP unit: @@ -399,9 +430,9 @@ class CHP(LinearConverter): label='natural_gas_chp', eta_th=0.45, # 45% thermal efficiency eta_el=0.35, # 35% electrical efficiency (80% total) - Q_fu=natural_gas_flow, - P_el=electricity_flow, - Q_th=district_heat_flow, + fuel_flow=natural_gas_flow, + power_flow=electricity_flow, + thermal_flow=district_heat_flow, ) ``` @@ -412,9 +443,9 @@ class CHP(LinearConverter): label='industrial_chp', eta_th=0.40, eta_el=0.38, - Q_fu=fuel_gas_flow, - P_el=plant_electricity, - Q_th=process_steam, + fuel_flow=fuel_gas_flow, + power_flow=plant_electricity, + thermal_flow=process_steam, on_off_parameters=OnOffParameters( consecutive_on_hours_min=8, # Minimum 8-hour operation effects_per_switch_on={'startup_cost': 5000}, @@ -425,8 +456,8 @@ class CHP(LinearConverter): Note: The conversion relationships are: - - Q_th = Q_fu × eta_th (thermal output) - - P_el = Q_fu × eta_el (electrical output) + - thermal_flow = fuel_flow × eta_th (thermal output) + - power_flow = fuel_flow × eta_el (electrical output) Total efficiency (eta_th + eta_el) should be ≤ 1.0, with typical combined efficiencies of 80-90% for modern CHP units. 
This provides significant @@ -438,24 +469,30 @@ def __init__( label: str, eta_th: Numeric_TPS, eta_el: Numeric_TPS, - Q_fu: Flow, - P_el: Flow, - Q_th: Flow, + fuel_flow: Flow | None = None, + power_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) + power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[Q_fu], - outputs=[Q_th, P_el], + inputs=[fuel_flow], + outputs=[thermal_flow, power_flow], conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.Q_fu = Q_fu - self.P_el = P_el - self.Q_th = Q_th + self.Q_fu = fuel_flow + self.P_el = power_flow + self.Q_th = thermal_flow self.eta_th = eta_th # Uses setter self.eta_el = eta_el # Uses setter @@ -502,14 +539,17 @@ class HeatPumpWithSource(LinearConverter): label: The label of the Element. Used to identify it in the FlowSystem. COP: Coefficient of Performance (typically 1-20 range). Defines the ratio of thermal output to electrical input. The heat source extraction is automatically - calculated as Q_ab = Q_th × (COP-1)/COP. - P_el: Electrical input-flow representing electricity consumption for compressor. - Q_ab: Heat source input-flow representing thermal energy extracted from environment + calculated as heat_source_flow = thermal_flow × (COP-1)/COP. + power_flow: Electrical input-flow representing electricity consumption for compressor. + heat_source_flow: Heat source input-flow representing thermal energy extracted from environment (ground, air, water source). - Q_th: Thermal output-flow representing useful heat delivered to the application. 
+ thermal_flow: Thermal output-flow representing useful heat delivered to the application. on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + P_el: *Deprecated*. Use `power_flow` instead. + Q_ab: *Deprecated*. Use `heat_source_flow` instead. + Q_th: *Deprecated*. Use `thermal_flow` instead. Examples: Ground-source heat pump with explicit ground coupling: @@ -518,9 +558,9 @@ class HeatPumpWithSource(LinearConverter): ground_source_hp = HeatPumpWithSource( label='geothermal_heat_pump', COP=4.5, # High COP due to stable ground temperature - P_el=electricity_flow, - Q_ab=ground_heat_extraction, # Heat extracted from ground loop - Q_th=building_heating_flow, + power_flow=electricity_flow, + heat_source_flow=ground_heat_extraction, # Heat extracted from ground loop + thermal_flow=building_heating_flow, ) ``` @@ -530,9 +570,9 @@ class HeatPumpWithSource(LinearConverter): waste_heat_pump = HeatPumpWithSource( label='waste_heat_pump', COP=temperature_dependent_cop, # Varies with temperature of heat source - P_el=electricity_consumption, - Q_ab=industrial_heat_extraction, # Heat extracted from a industrial process or waste water - Q_th=heat_supply, + power_flow=electricity_consumption, + heat_source_flow=industrial_heat_extraction, # Heat extracted from a industrial process or waste water + thermal_flow=heat_supply, on_off_parameters=OnOffParameters( consecutive_on_hours_min=0.5, # 30-minute minimum runtime effects_per_switch_on={'costs': 1000}, @@ -542,9 +582,9 @@ class HeatPumpWithSource(LinearConverter): Note: The conversion relationships are: - - Q_th = P_el × COP (thermal output from electrical input) - - Q_ab = Q_th × (COP-1)/COP (heat source extraction) - - Energy balance: Q_th = P_el + Q_ab + - thermal_flow = power_flow × COP (thermal output from electrical input) + - heat_source_flow = thermal_flow × (COP-1)/COP (heat 
source extraction) + - Energy balance: thermal_flow = power_flow + heat_source_flow This formulation explicitly tracks the heat source, which is important for systems where the source capacity or temperature is limited, @@ -558,23 +598,29 @@ def __init__( self, label: str, COP: Numeric_TPS, - P_el: Flow, - Q_ab: Flow, - Q_th: Flow, + power_flow: Flow | None = None, + heat_source_flow: Flow | None = None, + thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, meta_data: dict | None = None, + **kwargs, ): + # Handle deprecated parameters + power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) + heat_source_flow = self._handle_deprecated_kwarg(kwargs, 'Q_ab', 'heat_source_flow', heat_source_flow) + thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + super().__init__( label, - inputs=[P_el, Q_ab], - outputs=[Q_th], + inputs=[power_flow, heat_source_flow], + outputs=[thermal_flow], conversion_factors=[], on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = P_el - self.Q_ab = Q_ab - self.Q_th = Q_th + self.P_el = power_flow + self.Q_ab = heat_source_flow + self.Q_th = thermal_flow self.COP = COP # Uses setter @property From 0b3394c0a241ccba9f2d6c6c230eb3592b439106 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:38:31 +0100 Subject: [PATCH 34/35] Step 2 --- flixopt/linear_converters.py | 89 +++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 43 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 3c731f19d..371810094 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -97,18 +97,18 @@ def __init__( on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.Q_fu = fuel_flow - self.Q_th = thermal_flow + self.fuel_flow = fuel_flow + self.thermal_flow = thermal_flow self.eta = eta # Uses setter 
@property def eta(self): - return self.conversion_factors[0][self.Q_fu.label] + return self.conversion_factors[0][self.fuel_flow.label] @eta.setter def eta(self, value): check_bounds(value, 'eta', self.label_full, 0, 1) - self.conversion_factors = [{self.Q_fu.label: value, self.Q_th.label: 1}] + self.conversion_factors = [{self.fuel_flow.label: value, self.thermal_flow.label: 1}] @register_class_for_io @@ -192,18 +192,18 @@ def __init__( meta_data=meta_data, ) - self.P_el = power_flow - self.Q_th = thermal_flow + self.power_flow = power_flow + self.thermal_flow = thermal_flow self.eta = eta # Uses setter @property def eta(self): - return self.conversion_factors[0][self.P_el.label] + return self.conversion_factors[0][self.power_flow.label] @eta.setter def eta(self, value): check_bounds(value, 'eta', self.label_full, 0, 1) - self.conversion_factors = [{self.P_el.label: value, self.Q_th.label: 1}] + self.conversion_factors = [{self.power_flow.label: value, self.thermal_flow.label: 1}] @register_class_for_io @@ -218,7 +218,7 @@ class HeatPump(LinearConverter): Args: label: The label of the Element. Used to identify it in the FlowSystem. - COP: Coefficient of Performance (typically 1-20 range). Defines the ratio of + cop: Coefficient of Performance (typically 1-20 range). Defines the ratio of thermal output to electrical input. COP > 1 indicates the heat pump extracts additional energy from the environment. power_flow: Electrical input-flow representing electricity consumption. @@ -226,6 +226,7 @@ class HeatPump(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + COP: *Deprecated*. Use `cop` instead. P_el: *Deprecated*. Use `power_flow` instead. Q_th: *Deprecated*. Use `thermal_flow` instead. 
@@ -235,7 +236,7 @@ class HeatPump(LinearConverter): ```python air_hp = HeatPump( label='air_source_heat_pump', - COP=3.5, # COP of 3.5 (350% efficiency) + cop=3.5, # COP of 3.5 (350% efficiency) power_flow=electricity_flow, thermal_flow=heating_flow, ) @@ -246,7 +247,7 @@ class HeatPump(LinearConverter): ```python ground_hp = HeatPump( label='geothermal_heat_pump', - COP=temperature_dependent_cop, # Time-varying COP based on ground temp + cop=temperature_dependent_cop, # Time-varying COP based on ground temp power_flow=electricity_flow, thermal_flow=radiant_heating_flow, on_off_parameters=OnOffParameters( @@ -267,7 +268,7 @@ class HeatPump(LinearConverter): def __init__( self, label: str, - COP: Numeric_TPS, + cop: Numeric_TPS, power_flow: Flow | None = None, thermal_flow: Flow | None = None, on_off_parameters: OnOffParameters | None = None, @@ -277,6 +278,7 @@ def __init__( # Handle deprecated parameters power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) super().__init__( label, @@ -286,18 +288,18 @@ def __init__( on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = power_flow - self.Q_th = thermal_flow - self.COP = COP # Uses setter + self.power_flow = power_flow + self.thermal_flow = thermal_flow + self.cop = cop # Uses setter @property def COP(self): # noqa: N802 - return self.conversion_factors[0][self.P_el.label] + return self.conversion_factors[0][self.power_flow.label] @COP.setter def COP(self, value): # noqa: N802 check_bounds(value, 'COP', self.label_full, 1, 20) - self.conversion_factors = [{self.P_el.label: value, self.Q_th.label: 1}] + self.conversion_factors = [{self.power_flow.label: value, self.thermal_flow.label: 1}] @register_class_for_io @@ -382,18 +384,18 @@ def __init__( meta_data=meta_data, ) - self.P_el = power_flow - self.Q_th = 
thermal_flow + self.power_flow = power_flow + self.thermal_flow = thermal_flow self.specific_electricity_demand = specific_electricity_demand # Uses setter @property def specific_electricity_demand(self): - return self.conversion_factors[0][self.Q_th.label] + return self.conversion_factors[0][self.thermal_flow.label] @specific_electricity_demand.setter def specific_electricity_demand(self, value): check_bounds(value, 'specific_electricity_demand', self.label_full, 0, 1) - self.conversion_factors = [{self.P_el.label: -1, self.Q_th.label: value}] + self.conversion_factors = [{self.power_flow.label: -1, self.thermal_flow.label: value}] @register_class_for_io @@ -490,9 +492,9 @@ def __init__( meta_data=meta_data, ) - self.Q_fu = fuel_flow - self.P_el = power_flow - self.Q_th = thermal_flow + self.fuel_flow = fuel_flow + self.power_flow = power_flow + self.thermal_flow = thermal_flow self.eta_th = eta_th # Uses setter self.eta_el = eta_el # Uses setter @@ -500,29 +502,29 @@ def __init__( @property def eta_th(self): - return self.conversion_factors[0][self.Q_fu.label] + return self.conversion_factors[0][self.fuel_flow.label] @eta_th.setter def eta_th(self, value): check_bounds(value, 'eta_th', self.label_full, 0, 1) if len(self.conversion_factors) < 2: # Initialize structure if not yet set - self.conversion_factors = [{self.Q_fu.label: value, self.Q_th.label: 1}, {}] + self.conversion_factors = [{self.fuel_flow.label: value, self.thermal_flow.label: 1}, {}] else: - self.conversion_factors[0] = {self.Q_fu.label: value, self.Q_th.label: 1} + self.conversion_factors[0] = {self.fuel_flow.label: value, self.thermal_flow.label: 1} @property def eta_el(self): - return self.conversion_factors[1][self.Q_fu.label] + return self.conversion_factors[1][self.fuel_flow.label] @eta_el.setter def eta_el(self, value): check_bounds(value, 'eta_el', self.label_full, 0, 1) if len(self.conversion_factors) < 2: # Initialize structure if not yet set - self.conversion_factors = [{}, 
{self.Q_fu.label: value, self.P_el.label: 1}] + self.conversion_factors = [{}, {self.fuel_flow.label: value, self.power_flow.label: 1}] else: - self.conversion_factors[1] = {self.Q_fu.label: value, self.P_el.label: 1} + self.conversion_factors[1] = {self.fuel_flow.label: value, self.power_flow.label: 1} @register_class_for_io @@ -597,7 +599,7 @@ class HeatPumpWithSource(LinearConverter): def __init__( self, label: str, - COP: Numeric_TPS, + cop: Numeric_TPS, power_flow: Flow | None = None, heat_source_flow: Flow | None = None, thermal_flow: Flow | None = None, @@ -609,6 +611,7 @@ def __init__( power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) heat_source_flow = self._handle_deprecated_kwarg(kwargs, 'Q_ab', 'heat_source_flow', heat_source_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) super().__init__( label, @@ -618,23 +621,23 @@ def __init__( on_off_parameters=on_off_parameters, meta_data=meta_data, ) - self.P_el = power_flow - self.Q_ab = heat_source_flow - self.Q_th = thermal_flow - self.COP = COP # Uses setter + self.power_flow = power_flow + self.heat_source_flow = heat_source_flow + self.thermal_flow = thermal_flow + self.cop = cop # Uses setter @property - def COP(self): # noqa: N802 - return self.conversion_factors[0][self.P_el.label] + def cop(self): # noqa: N802 + return self.conversion_factors[0][self.power_flow.label] - @COP.setter - def COP(self, value): # noqa: N802 - check_bounds(value, 'COP', self.label_full, 1, 20) + @cop.setter + def cop(self, value): # noqa: N802 + check_bounds(value, 'cop', self.label_full, 1, 20) if np.any(np.asarray(value) <= 1): - raise ValueError(f'{self.label_full}.COP must be strictly > 1 for HeatPumpWithSource.') + raise ValueError(f'{self.label_full}.cop must be strictly > 1 for HeatPumpWithSource.') self.conversion_factors = [ - {self.P_el.label: value, self.Q_th.label: 
1}, - {self.Q_ab.label: value / (value - 1), self.Q_th.label: 1}, + {self.power_flow.label: value, self.thermal_flow.label: 1}, + {self.heat_source_flow.label: value / (value - 1), self.thermal_flow.label: 1}, ] From bddce4ff61527a4d5a3579d6560e760302ab59bc Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Sat, 15 Nov 2025 11:40:11 +0100 Subject: [PATCH 35/35] Step 3 --- flixopt/linear_converters.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index 371810094..76aa25a47 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -89,6 +89,7 @@ def __init__( # Handle deprecated parameters fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + self._validate_kwargs(kwargs) super().__init__( label, @@ -183,6 +184,7 @@ def __init__( # Handle deprecated parameters power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + self._validate_kwargs(kwargs) super().__init__( label, @@ -279,6 +281,7 @@ def __init__( power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) + self._validate_kwargs(kwargs) super().__init__( label, @@ -293,12 +296,12 @@ def __init__( self.cop = cop # Uses setter @property - def COP(self): # noqa: N802 + def cop(self): return self.conversion_factors[0][self.power_flow.label] - @COP.setter - def COP(self, value): # noqa: N802 - check_bounds(value, 'COP', self.label_full, 1, 20) + @cop.setter + def cop(self, value): + check_bounds(value, 'cop', self.label_full, 1, 20) 
self.conversion_factors = [{self.power_flow.label: value, self.thermal_flow.label: 1}] @@ -375,6 +378,7 @@ def __init__( # Handle deprecated parameters power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + self._validate_kwargs(kwargs) super().__init__( label, @@ -482,6 +486,7 @@ def __init__( fuel_flow = self._handle_deprecated_kwarg(kwargs, 'Q_fu', 'fuel_flow', fuel_flow) power_flow = self._handle_deprecated_kwarg(kwargs, 'P_el', 'power_flow', power_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) + self._validate_kwargs(kwargs) super().__init__( label, @@ -539,7 +544,7 @@ class HeatPumpWithSource(LinearConverter): Args: label: The label of the Element. Used to identify it in the FlowSystem. - COP: Coefficient of Performance (typically 1-20 range). Defines the ratio of + cop: Coefficient of Performance (typically 1-20 range). Defines the ratio of thermal output to electrical input. The heat source extraction is automatically calculated as heat_source_flow = thermal_flow × (COP-1)/COP. power_flow: Electrical input-flow representing electricity consumption for compressor. @@ -549,6 +554,7 @@ class HeatPumpWithSource(LinearConverter): on_off_parameters: Parameters defining binary operation constraints and costs. meta_data: Used to store additional information. Not used internally but saved in results. Only use Python native types. + COP: *Deprecated*. Use `cop` instead. P_el: *Deprecated*. Use `power_flow` instead. Q_ab: *Deprecated*. Use `heat_source_flow` instead. Q_th: *Deprecated*. Use `thermal_flow` instead. 
@@ -559,7 +565,7 @@ class HeatPumpWithSource(LinearConverter): ```python ground_source_hp = HeatPumpWithSource( label='geothermal_heat_pump', - COP=4.5, # High COP due to stable ground temperature + cop=4.5, # High COP due to stable ground temperature power_flow=electricity_flow, heat_source_flow=ground_heat_extraction, # Heat extracted from ground loop thermal_flow=building_heating_flow, @@ -571,7 +577,7 @@ class HeatPumpWithSource(LinearConverter): ```python waste_heat_pump = HeatPumpWithSource( label='waste_heat_pump', - COP=temperature_dependent_cop, # Varies with temperature of heat source + cop=temperature_dependent_cop, # Varies with temperature of heat source power_flow=electricity_consumption, heat_source_flow=industrial_heat_extraction, # Heat extracted from a industrial process or waste water thermal_flow=heat_supply, @@ -612,6 +618,7 @@ def __init__( heat_source_flow = self._handle_deprecated_kwarg(kwargs, 'Q_ab', 'heat_source_flow', heat_source_flow) thermal_flow = self._handle_deprecated_kwarg(kwargs, 'Q_th', 'thermal_flow', thermal_flow) cop = self._handle_deprecated_kwarg(kwargs, 'COP', 'cop', cop) + self._validate_kwargs(kwargs) super().__init__( label,