From 97c9787dc421628fe0698ea94afab7ba34cfa67e Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 3 Feb 2026 22:14:53 +0100 Subject: [PATCH 1/8] Move validation to FlowsData mostly --- flixopt/batched.py | 64 ++++++++++++++++++++++++++++++++++++++++++ flixopt/elements.py | 53 +++++++++++++++------------------- flixopt/flow_system.py | 4 +++ 3 files changed, 90 insertions(+), 31 deletions(-) diff --git a/flixopt/batched.py b/flixopt/batched.py index 1f054f519..1cd1d8ea6 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -11,6 +11,7 @@ from __future__ import annotations +import logging from functools import cached_property from typing import TYPE_CHECKING @@ -29,6 +30,8 @@ from .elements import Bus, Component, Flow from .flow_system import FlowSystem +logger = logging.getLogger('flixopt') + def build_effects_array( effect_dicts: dict[str, dict[str, float | xr.DataArray]], @@ -1476,6 +1479,67 @@ def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None) return self._ensure_canonical_order(arr) + # === Validation === + + def validate(self) -> None: + """Validate all flows after transformation. + + This method performs two types of validation: + 1. Config validation on each flow (simple checks, no DataArray operations) + 2. Batched validation (DataArray-based checks across all flows) + + Raises: + PlausibilityError: If any validation check fails. + """ + from .core import PlausibilityError + + # First: run config validation on each flow + for flow in self.elements.values(): + flow.validate_config() + + errors: list[str] = [] + + # Batched checks: relative_minimum <= relative_maximum + invalid_bounds = (self.relative_minimum > self.relative_maximum).any( + dim=[d for d in self.relative_minimum.dims if d != 'flow'] + ) + if invalid_bounds.any(): + bad_flows = [fid for fid, bad in zip(self.ids, invalid_bounds.values, strict=False) if bad] + errors.append(f'relative_minimum > relative_maximum for flows: {bad_flows}') + + # Check: size required when relative_minimum > 0 + has_nonzero_min = (self.relative_minimum > 0).any(dim=[d for d in self.relative_minimum.dims if d != 'flow']) + needs_size_for_min = has_nonzero_min & ~self.has_size + if needs_size_for_min.any(): + bad_flows = [fid for fid, bad in zip(self.ids, needs_size_for_min.values, strict=False) if bad] + errors.append( + f'relative_minimum > 0 but no size defined for flows: {bad_flows}. ' + f'A size is required because the lower bound is size * relative_minimum.' + ) + + # Check: size required when relative_maximum < 1 + has_nondefault_max = (self.relative_maximum < 1).any(dim=[d for d in self.relative_maximum.dims if d != 'flow']) + needs_size_for_max = has_nondefault_max & ~self.has_size + if needs_size_for_max.any(): + bad_flows = [fid for fid, bad in zip(self.ids, needs_size_for_max.values, strict=False) if bad] + errors.append( + f'relative_maximum < 1 but no size defined for flows: {bad_flows}. ' + f'A size is required because the upper bound is size * relative_maximum.' + ) + + # Warning: relative_minimum > 0 without status_parameters prevents switching inactive + has_nonzero_min_no_status = has_nonzero_min & ~self.has_status + if has_nonzero_min_no_status.any(): + warn_flows = [fid for fid, warn in zip(self.ids, has_nonzero_min_no_status.values, strict=False) if warn] + logger.warning( + f'Flows {warn_flows} have relative_minimum > 0 and no status_parameters. ' + f'This prevents the flow from switching inactive (flow_rate = 0). 
' + f'Consider using status_parameters to allow switching active and inactive.' + ) + + if errors: + raise PlausibilityError('\n'.join(errors)) + class EffectsData: """Batched data container for all effects. diff --git a/flixopt/elements.py b/flixopt/elements.py index 10933feb6..2110b4d94 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -661,11 +661,12 @@ def transform_data(self) -> None: elif self.size is not None: self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario']) - def _plausibility_checks(self) -> None: - # TODO: Incorporate into Variable? (Lower_bound can not be greater than upper bound - if (self.relative_minimum > self.relative_maximum).any(): - raise PlausibilityError(self.label_full + ': Take care, that relative_minimum <= relative_maximum!') + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + These are simple checks that don't require DataArray operations. + Checks that require transformed data are done in FlowsData.validate(). + """ # Size is required when using StatusParameters (for big-M constraints) if self.status_parameters is not None and self.size is None: raise PlausibilityError( @@ -679,19 +680,6 @@ def _plausibility_checks(self) -> None: f'A size is required because flow_rate = size * fixed_relative_profile.' ) - # Size is required when using non-default relative bounds (flow_rate = size * relative_bound) - if self.size is None and np.any(self.relative_minimum > 0): - raise PlausibilityError( - f'Flow "{self.label_full}" has relative_minimum > 0 but no size defined. ' - f'A size is required because the lower bound is size * relative_minimum.' - ) - - if self.size is None and np.any(self.relative_maximum < 1): - raise PlausibilityError( - f'Flow "{self.label_full}" has relative_maximum != 1 but no size defined. ' - f'A size is required because the upper bound is size * relative_maximum.' - ) - # Size is required for load factor constraints (total_flow_hours / size) if self.size is None and self.load_factor_min is not None: raise PlausibilityError( @@ -705,19 +693,7 @@ def _plausibility_checks(self) -> None: f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.' ) - if self.fixed_relative_profile is not None and self.status_parameters is not None: - logger.warning( - f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters.' - f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.' - ) - - if np.any(self.relative_minimum > 0) and self.status_parameters is None: - logger.warning( - f'Flow {self.label_full} has a relative_minimum of {self.relative_minimum} and no status_parameters. ' - f'This prevents the Flow from switching inactive (flow_rate = 0). ' - f'Consider using status_parameters to allow the Flow to be switched active and inactive.' - ) - + # Validate previous_flow_rate type if self.previous_flow_rate is not None: if not any( [ @@ -726,10 +702,25 @@ def _plausibility_checks(self) -> None: ] ): raise TypeError( - f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. Got {type(self.previous_flow_rate)}. ' + f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. ' + f'Got {type(self.previous_flow_rate)}. ' f'Different values in different periods or scenarios are not yet supported.' 
) + # Warning: fixed_relative_profile + status_parameters is unusual + if self.fixed_relative_profile is not None and self.status_parameters is not None: + logger.warning( + f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters. ' + f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.' + ) + + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config(). + + DataArray-based validation is now done in FlowsData.validate(). + """ + self.validate_config() + @property def label_full(self) -> str: return f'{self.component}({self.label})' diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 54e76d333..dd4cc5f41 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1497,9 +1497,13 @@ def _prepare_effects(self) -> None: def _run_plausibility_checks(self) -> None: """Run plausibility checks on all elements after data transformation.""" + # Element-level config validation (simple checks, no DataArray operations) for element in chain(self.components.values(), self.effects.values(), self.buses.values()): element._plausibility_checks() + # Batched validation for flows (DataArray-based checks) + self.batched.flows.validate() + def _validate_system_integrity(self) -> None: """ Validate cross-element references to ensure system consistency. From 5894597b0dbf27f78a18a678a69a60f2d930c86a Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 3 Feb 2026 22:21:41 +0100 Subject: [PATCH 2/8] Summary: Validation Split for All Interface Classes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Classes Updated with validate_config() ┌──────────────────┬───────────────┬────────────────────────────────────────────────────────────────────────────┐ │ Class │ Location │ Config Checks │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Component │ elements.py │ unique flow labels, status→flows.size │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Bus │ elements.py │ no flows connected │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Flow │ elements.py │ status→size, fixed_profile→size, load_factor→size, previous_flow_rate type │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Effect │ effects.py │ period dimension for over_periods constraints │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ EffectCollection │ effects.py │ circular loops, unknown effect refs │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ LinearConverter │ components.py │ conversion_factors XOR piecewise, degrees_of_freedom, flow refs │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Storage │ components.py │ initial_charge_state string, balanced→InvestParams, final_charge→capacity │ ├──────────────────┼───────────────┼────────────────────────────────────────────────────────────────────────────┤ │ Transmission │ components.py │ bus consistency, balanced→InvestParams │ 
└──────────────────┴───────────────┴────────────────────────────────────────────────────────────────────────────┘ *Data Classes with validate() ┌───────────────────┬────────────┬─────────────────────────────────────────────────────────────────────────┐ │ Class │ Location │ DataArray Checks │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ FlowsData │ batched.py │ relative_min ≤ max, size required for bounds │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ StoragesData │ batched.py │ capacity for relative bounds, initial vs capacity, balanced size compat │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ BusesData │ batched.py │ imbalance_penalty == 0 warning │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ TransmissionsData │ batched.py │ balanced size compatibility │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ EffectsData │ batched.py │ delegates to validate_config() │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ ComponentsData │ batched.py │ delegates to validate_config() │ ├───────────────────┼────────────┼─────────────────────────────────────────────────────────────────────────┤ │ ConvertersData │ batched.py │ delegates to validate_config() │ └───────────────────┴────────────┴─────────────────────────────────────────────────────────────────────────┘ Updated FlowSystem _run_plausibility_checks() now creates temporary *Data instances and calls validate() on each, which handles both config and DataArray checks in a centralized way. --- flixopt/batched.py | 154 +++++++++++++++++++++++++++++++++++++++++ flixopt/components.py | 131 ++++++++++++++--------------------- flixopt/effects.py | 20 +++++- flixopt/elements.py | 30 +++++--- flixopt/flow_system.py | 39 +++++++++-- 5 files changed, 279 insertions(+), 95 deletions(-) diff --git a/flixopt/batched.py b/flixopt/batched.py index 1cd1d8ea6..cc46734f1 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -750,6 +750,87 @@ def charge_state_upper_bounds(self) -> xr.DataArray: """(storage, time_extra) - absolute upper bounds = relative_max * capacity_upper.""" return self.relative_maximum_charge_state_extra * self.capacity_upper + # === Validation === + + def validate(self) -> None: + """Validate all storages after transformation. + + Performs batched validation checks that require DataArray operations. + Simple config checks are done in Storage.validate_config(). + + Raises: + PlausibilityError: If any validation check fails. + """ + from .core import PlausibilityError + from .modeling import _scalar_safe_isel + + errors: list[str] = [] + + for storage in self._storages: + # First run config validation + storage.validate_config() + + sid = storage.label_full + + # Capacity required for non-default relative bounds (DataArray checks) + if storage.capacity_in_flow_hours is None: + if np.any(storage.relative_minimum_charge_state > 0): + errors.append( + f'Storage "{sid}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. ' + f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.' 
+ ) + if np.any(storage.relative_maximum_charge_state < 1): + errors.append( + f'Storage "{sid}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. ' + f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.' + ) + + # Initial charge state vs capacity bounds (DataArray checks) + if storage.capacity_in_flow_hours is not None: + if isinstance(storage.capacity_in_flow_hours, InvestParameters): + minimum_capacity = storage.capacity_in_flow_hours.minimum_or_fixed_size + maximum_capacity = storage.capacity_in_flow_hours.maximum_or_fixed_size + else: + maximum_capacity = storage.capacity_in_flow_hours + minimum_capacity = storage.capacity_in_flow_hours + + min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel( + storage.relative_minimum_charge_state, {'time': 0} + ) + max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel( + storage.relative_maximum_charge_state, {'time': 0} + ) + + initial_equals_final = isinstance(storage.initial_charge_state, str) + if not initial_equals_final and storage.initial_charge_state is not None: + if (storage.initial_charge_state > max_initial_at_min_capacity).any(): + errors.append( + f'{sid}: initial_charge_state={storage.initial_charge_state} ' + f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.' + ) + if (storage.initial_charge_state < min_initial_at_max_capacity).any(): + errors.append( + f'{sid}: initial_charge_state={storage.initial_charge_state} ' + f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.' + ) + + # Balanced charging/discharging size compatibility (DataArray checks) + if storage.balanced: + charging_min = storage.charging.size.minimum_or_fixed_size + charging_max = storage.charging.size.maximum_or_fixed_size + discharging_min = storage.discharging.size.minimum_or_fixed_size + discharging_max = storage.discharging.size.maximum_or_fixed_size + + if (charging_min > discharging_max).any() or (charging_max < discharging_min).any(): + errors.append( + f'Balancing charging and discharging Flows in {sid} need compatible minimum and maximum sizes. ' + f'Got: charging.size.minimum={charging_min}, charging.size.maximum={charging_max} and ' + f'discharging.size.minimum={discharging_min}, discharging.size.maximum={discharging_max}.' + ) + + if errors: + raise PlausibilityError('\n'.join(errors)) + class FlowsData: """Batched data container for all flows with indexed access. @@ -1658,6 +1739,14 @@ def values(self): """Iterate over Effect objects.""" return self._effects + def validate(self) -> None: + """Validate all effects after transformation. + + Simple config checks are done in Effect.validate_config(). + """ + for effect in self._effects: + effect.validate_config() + class BusesData: """Batched data container for buses.""" @@ -1684,6 +1773,23 @@ def imbalance_elements(self) -> list[Bus]: """Bus objects that allow imbalance.""" return [b for b in self._buses if b.allows_imbalance] + def validate(self) -> None: + """Validate all buses after transformation. + + Performs validation checks including DataArray operations. + Simple config checks are done in Bus.validate_config(). 
+ """ + for bus in self._buses: + bus.validate_config() + + # Warning: imbalance_penalty == 0 (DataArray check) + if bus.imbalance_penalty_per_flow_hour is not None: + zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0)) + if zero_penalty: + logger.warning( + f'In Bus {bus.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' + ) + class ComponentsData: """Batched data container for components with status.""" @@ -1705,6 +1811,14 @@ def dim_name(self) -> str: def all_components(self) -> list[Component]: return self._all_components + def validate(self) -> None: + """Validate all components after transformation. + + Simple config checks are done in Component.validate_config(). + """ + for component in self._all_components: + component.validate_config() + class ConvertersData: """Batched data container for converters.""" @@ -1731,6 +1845,14 @@ def with_piecewise(self) -> list[LinearConverter]: """Converters with piecewise_conversion.""" return [c for c in self._converters if c.piecewise_conversion] + def validate(self) -> None: + """Validate all converters after transformation. + + Simple config checks are done in LinearConverter.validate_config(). + """ + for converter in self._converters: + converter.validate_config() + class TransmissionsData: """Batched data container for transmissions.""" @@ -1757,6 +1879,38 @@ def balanced(self) -> list[Transmission]: """Transmissions with balanced flow sizes.""" return [t for t in self._transmissions if t.balanced] + def validate(self) -> None: + """Validate all transmissions after transformation. + + Performs validation checks including DataArray operations. + Simple config checks are done in Transmission.validate_config(). + + Raises: + ValueError: If any validation check fails. + """ + errors: list[str] = [] + + for transmission in self._transmissions: + transmission.validate_config() + tid = transmission.label_full + + # Balanced size compatibility (DataArray check) + if transmission.balanced: + in1_min = transmission.in1.size.minimum_or_fixed_size + in1_max = transmission.in1.size.maximum_or_fixed_size + in2_min = transmission.in2.size.minimum_or_fixed_size + in2_max = transmission.in2.size.maximum_or_fixed_size + + if (in1_min > in2_max).any() or (in1_max < in2_min).any(): + errors.append( + f'Balanced Transmission {tid} needs compatible minimum and maximum sizes. ' + f'Got: in1.size.minimum={in1_min}, in1.size.maximum={in1_max} and ' + f'in2.size.minimum={in2_min}, in2.size.maximum={in2_max}.' + ) + + if errors: + raise ValueError('\n'.join(errors)) + class BatchedAccessor: """Accessor for batched data containers on FlowSystem. 
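Note on the batched checks added to batched.py in this patch and the previous one: every
*Data.validate() follows the same reduce-then-map pattern — reduce a boolean DataArray over
every dimension except the element dimension, then map the surviving entries back to element
ids, so a single PlausibilityError can report all offending elements at once instead of
failing per timestep. A minimal self-contained sketch of that pattern, using hypothetical
data (the flow ids are made up; only numpy and xarray are assumed):

    import numpy as np
    import xarray as xr

    flow_ids = ['Boiler(Q_th)', 'CHP(P_el)']
    rel_min = xr.DataArray(
        np.array([[0.0, 0.2], [0.5, 0.9]]), dims=('flow', 'time'), coords={'flow': flow_ids}
    )
    rel_max = xr.DataArray(
        np.array([[1.0, 1.0], [0.4, 1.0]]), dims=('flow', 'time'), coords={'flow': flow_ids}
    )

    # Reduce over every dimension except 'flow' ...
    invalid = (rel_min > rel_max).any(dim=[d for d in rel_min.dims if d != 'flow'])
    # ... then map the per-element result back to ids for the error message.
    bad_flows = [fid for fid, bad in zip(flow_ids, invalid.values) if bad]
    assert bad_flows == ['CHP(P_el)']  # rel_min 0.5 > rel_max 0.4 at the first timestep

Collecting all failures into one error list before raising, as the validate() methods above
do, means a user fixing a large FlowSystem sees every invalid element in one run.
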
diff --git a/flixopt/components.py b/flixopt/components.py index 95f1daf4d..1de5283b1 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -17,7 +17,7 @@ from .elements import Component, Flow from .features import MaskHelpers, stack_along_dim from .interface import InvestParameters, PiecewiseConversion, StatusParameters -from .modeling import _scalar_safe_isel, _scalar_safe_reduce +from .modeling import _scalar_safe_reduce from .structure import ( FlowSystemModel, FlowVarName, @@ -190,8 +190,12 @@ def link_to_flow_system(self, flow_system, prefix: str = '') -> None: if self.piecewise_conversion is not None: self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion')) - def _plausibility_checks(self) -> None: - super()._plausibility_checks() + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + """ + super().validate_config() if not self.conversion_factors and not self.piecewise_conversion: raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!') if self.conversion_factors and self.piecewise_conversion: @@ -220,6 +224,10 @@ def _plausibility_checks(self) -> None: f'({flow.label_full}).' ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config().""" + self.validate_config() + def transform_data(self) -> None: super().transform_data() if self.conversion_factors: @@ -495,31 +503,21 @@ def transform_data(self) -> None: f'{self.prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario'] ) - def _plausibility_checks(self) -> None: - """ - Check for infeasible or uncommon combinations of parameters + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + DataArray-based validation is done in StoragesData.validate(). """ - super()._plausibility_checks() + super().validate_config() - # Validate string values and set flag - initial_equals_final = False + # Validate string values for initial_charge_state if isinstance(self.initial_charge_state, str): - if not self.initial_charge_state == 'equals_final': + if self.initial_charge_state != 'equals_final': raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}') - initial_equals_final = True - # Capacity is required when using non-default relative bounds + # Capacity is required for final charge state constraints (simple None checks) if self.capacity_in_flow_hours is None: - if np.any(self.relative_minimum_charge_state > 0): - raise PlausibilityError( - f'Storage "{self.label_full}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. ' - f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.' - ) - if np.any(self.relative_maximum_charge_state < 1): - raise PlausibilityError( - f'Storage "{self.label_full}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. ' - f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.' - ) if self.relative_minimum_final_charge_state is not None: raise PlausibilityError( f'Storage "{self.label_full}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. 
' @@ -531,39 +529,7 @@ def _plausibility_checks(self) -> None: f'A capacity is required for relative final charge state constraints.' ) - # Skip capacity-related checks if capacity is None (unbounded) - if self.capacity_in_flow_hours is not None: - # Use new InvestParameters methods to get capacity bounds - if isinstance(self.capacity_in_flow_hours, InvestParameters): - minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size - maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size - else: - maximum_capacity = self.capacity_in_flow_hours - minimum_capacity = self.capacity_in_flow_hours - - # Initial charge state should not constrain investment decision - # If initial > (min_cap * rel_max), investment is forced to increase capacity - # If initial < (max_cap * rel_min), investment is forced to decrease capacity - min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel( - self.relative_minimum_charge_state, {'time': 0} - ) - max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel( - self.relative_maximum_charge_state, {'time': 0} - ) - - # Only perform numeric comparisons if using a numeric initial_charge_state - if not initial_equals_final and self.initial_charge_state is not None: - if (self.initial_charge_state > max_initial_at_min_capacity).any(): - raise PlausibilityError( - f'{self.label_full}: {self.initial_charge_state=} ' - f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.' - ) - if (self.initial_charge_state < min_initial_at_max_capacity).any(): - raise PlausibilityError( - f'{self.label_full}: {self.initial_charge_state=} ' - f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.' - ) - + # Balanced requires InvestParameters on charging/discharging flows if self.balanced: if not isinstance(self.charging.size, InvestParameters) or not isinstance( self.discharging.size, InvestParameters @@ -572,14 +538,12 @@ def _plausibility_checks(self) -> None: f'Balancing charging and discharging Flows in {self.label_full} is only possible with Investments.' ) - if (self.charging.size.minimum_or_fixed_size > self.discharging.size.maximum_or_fixed_size).any() or ( - self.charging.size.maximum_or_fixed_size < self.discharging.size.minimum_or_fixed_size - ).any(): - raise PlausibilityError( - f'Balancing charging and discharging Flows in {self.label_full} need compatible minimum and maximum sizes.' - f'Got: {self.charging.size.minimum_or_fixed_size=}, {self.charging.size.maximum_or_fixed_size=} and ' - f'{self.discharging.size.minimum_or_fixed_size=}, {self.discharging.size.maximum_or_fixed_size=}.' - ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config(). + + DataArray-based checks moved to StoragesData.validate(). + """ + self.validate_config() def __repr__(self) -> str: """Return string representation.""" @@ -737,31 +701,38 @@ def __init__( self.absolute_losses = absolute_losses self.balanced = balanced - def _plausibility_checks(self): - super()._plausibility_checks() - # check buses: + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + DataArray-based validation is done in TransmissionsData.validate(). 
+ """ + super().validate_config() + # Check buses consistency if self.in2 is not None: - assert self.in2.bus == self.out1.bus, ( - f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}' - ) + if self.in2.bus != self.out1.bus: + raise ValueError( + f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}' + ) if self.out2 is not None: - assert self.out2.bus == self.in1.bus, ( - f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}' - ) + if self.out2.bus != self.in1.bus: + raise ValueError( + f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}' + ) + # Balanced requires InvestParameters on both in-Flows if self.balanced: if self.in2 is None: raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows') if not isinstance(self.in1.size, InvestParameters) or not isinstance(self.in2.size, InvestParameters): raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows') - if (self.in1.size.minimum_or_fixed_size > self.in2.size.maximum_or_fixed_size).any() or ( - self.in1.size.maximum_or_fixed_size < self.in2.size.minimum_or_fixed_size - ).any(): - raise ValueError( - f'Balanced Transmission needs compatible minimum and maximum sizes.' - f'Got: {self.in1.size.minimum_or_fixed_size=}, {self.in1.size.maximum_or_fixed_size=} and ' - f'{self.in2.size.minimum_or_fixed_size=}, {self.in2.size.maximum_or_fixed_size=}.' - ) + + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config(). + + DataArray-based checks moved to TransmissionsData.validate(). + """ + self.validate_config() def _propagate_status_parameters(self) -> None: super()._propagate_status_parameters() diff --git a/flixopt/effects.py b/flixopt/effects.py index 87ed65776..fc8493e70 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -290,7 +290,11 @@ def transform_data(self) -> None: f'{self.prefix}|period_weights', self.period_weights, dims=['period', 'scenario'] ) - def _plausibility_checks(self) -> None: + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + """ # Check that minimum_over_periods and maximum_over_periods require a period dimension if ( self.minimum_over_periods is not None or self.maximum_over_periods is not None @@ -301,6 +305,10 @@ def _plausibility_checks(self) -> None: f'the FlowSystem, or remove these constraints.' ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config().""" + self.validate_config() + class EffectsModel: """Type-level model for ALL effects with batched variables using 'effect' dimension. @@ -811,7 +819,11 @@ def get_effect_label(eff: str | None) -> str: return {get_effect_label(effect): value for effect, value in effect_values_user.items()} return {self.standard_effect.label: effect_values_user} - def _plausibility_checks(self) -> None: + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. 
+ """ # Check circular loops in effects: temporal, periodic = self.calculate_effect_share_factors() @@ -834,6 +846,10 @@ def _plausibility_checks(self) -> None: cycle_str = '\n'.join([' -> '.join(cycle) for cycle in periodic_cycles]) raise ValueError(f'Error: circular periodic-shares detected:\n{cycle_str}') + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config().""" + self.validate_config() + def __getitem__(self, effect: str | Effect | None) -> Effect: """ Get an effect by label, or return the standard effect if None is passed diff --git a/flixopt/elements.py b/flixopt/elements.py index 2110b4d94..c7283fe3e 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -237,7 +237,11 @@ def _check_unique_flow_labels(self, inputs: list = None, outputs: list = None): duplicates = {label for label in all_flow_labels if all_flow_labels.count(label) > 1} raise ValueError(f'Flow names must be unique! "{self.label_full}" got 2 or more of: {duplicates}') - def _plausibility_checks(self) -> None: + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + """ self._check_unique_flow_labels() # Component with status_parameters requires all flows to have sizes set @@ -251,6 +255,10 @@ def _plausibility_checks(self) -> None: f'(required for big-M constraints).' ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config().""" + self.validate_config() + def _connect_flows(self, inputs=None, outputs=None): if inputs is None: inputs = list(self.inputs.values()) @@ -399,18 +407,24 @@ def transform_data(self) -> None: f'{self.prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour ) - def _plausibility_checks(self) -> None: - if self.imbalance_penalty_per_flow_hour is not None: - zero_penalty = np.all(np.equal(self.imbalance_penalty_per_flow_hour, 0)) - if zero_penalty: - logger.warning( - f'In Bus {self.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' - ) + def validate_config(self) -> None: + """Validate configuration consistency (before transformation). + + These are simple checks that don't require DataArray operations. + DataArray-based validation is done in BusesData.validate(). + """ if len(self.inputs) == 0 and len(self.outputs) == 0: raise ValueError( f'Bus "{self.label_full}" has no Flows connected to it. Please remove it from the FlowSystem' ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config(). + + DataArray-based checks (imbalance_penalty warning) moved to BusesData.validate(). + """ + self.validate_config() + @property def allows_imbalance(self) -> bool: return self.imbalance_penalty_per_flow_hour is not None diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index dd4cc5f41..893dbda6b 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1496,14 +1496,43 @@ def _prepare_effects(self) -> None: penalty.link_to_flow_system(self) def _run_plausibility_checks(self) -> None: - """Run plausibility checks on all elements after data transformation.""" - # Element-level config validation (simple checks, no DataArray operations) - for element in chain(self.components.values(), self.effects.values(), self.buses.values()): - element._plausibility_checks() + """Run plausibility checks on all elements after data transformation. 
+ + This runs both config validation (simple checks) and DataArray-based + validation through the *Data classes. + """ + from .batched import BusesData, ConvertersData, EffectsData, StoragesData, TransmissionsData + from .components import LinearConverter, Storage, Transmission - # Batched validation for flows (DataArray-based checks) + # Batched validation for flows (includes config + DataArray checks) self.batched.flows.validate() + # Batched validation for buses + BusesData(list(self.buses.values())).validate() + + # Batched validation for effects + EffectsData(self.effects).validate() + + # Batched validation for storages + storages = [c for c in self.components.values() if isinstance(c, Storage)] + if storages: + StoragesData(storages, 'storage', list(self.effects.keys())).validate() + + # Batched validation for converters + converters = [c for c in self.components.values() if isinstance(c, LinearConverter)] + if converters: + ConvertersData(converters).validate() + + # Batched validation for transmissions + transmissions = [c for c in self.components.values() if isinstance(c, Transmission)] + if transmissions: + TransmissionsData(transmissions).validate() + + # Validate remaining components (those without specialized *Data classes) + for component in self.components.values(): + if not isinstance(component, (Storage, LinearConverter, Transmission)): + component.validate_config() + def _validate_system_integrity(self) -> None: """ Validate cross-element references to ensure system consistency. From b6e80dc6d08843e24c82db5ccf7a86ce387bf6a6 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 3 Feb 2026 22:34:23 +0100 Subject: [PATCH 3/8] Summary: Cached *Data in BatchedAccessor What Changed *1. BatchedAccessor now caches all Data classes: class BatchedAccessor: @property def flows(self) -> FlowsData: ... @property def storages(self) -> StoragesData: ... @property def intercluster_storages(self) -> StoragesData: ... @property def buses(self) -> BusesData: ... @property def effects(self) -> EffectsData: ... @property def components(self) -> ComponentsData: ... @property def converters(self) -> ConvertersData: ... @property def transmissions(self) -> TransmissionsData: ... 2. FlowSystemModel.build_model() now uses cached instances: batched = self.flow_system.batched self.effects = EffectsModel(self, batched.effects) # reuses cached self._flows_model = FlowsModel(self, batched.flows) # reuses cached # etc. 3. _run_plausibility_checks() simplified: batched = self.batched batched.flows.validate() batched.buses.validate() # etc. Benefits - No duplicate creation: Same *Data objects used for validation AND model building - Early validation: Errors caught during connect_and_transform() - Proper invalidation: _batched = None when status drops below CONNECTED - Cleaner code: No temporary object creation in validation or build_model --- flixopt/batched.py | 104 ++++++++++++++++++++++++++++++++++++++++- flixopt/flow_system.py | 47 ++++++------------- flixopt/structure.py | 57 +++++----------------- 3 files changed, 128 insertions(+), 80 deletions(-) diff --git a/flixopt/batched.py b/flixopt/batched.py index cc46734f1..db04a846c 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -1915,13 +1915,26 @@ def validate(self) -> None: class BatchedAccessor: """Accessor for batched data containers on FlowSystem. + Provides cached access to *Data containers for all element types. 
+ The same cached instances are used for both validation (during connect_and_transform) + and model building, ensuring consistency and avoiding duplicate object creation. + Usage: - flow_system.batched.flows # Access FlowsData + flow_system.batched.flows # Access FlowsData + flow_system.batched.storages # Access StoragesData + flow_system.batched.buses # Access BusesData """ def __init__(self, flow_system: FlowSystem): self._fs = flow_system self._flows: FlowsData | None = None + self._storages: StoragesData | None = None + self._intercluster_storages: StoragesData | None = None + self._buses: BusesData | None = None + self._effects: EffectsData | None = None + self._components: ComponentsData | None = None + self._converters: ConvertersData | None = None + self._transmissions: TransmissionsData | None = None @property def flows(self) -> FlowsData: @@ -1931,6 +1944,93 @@ def flows(self) -> FlowsData: self._flows = FlowsData(all_flows, self._fs) return self._flows + @property + def storages(self) -> StoragesData: + """Get or create StoragesData for basic storages (excludes intercluster).""" + if self._storages is None: + from .components import Storage + + clustering = self._fs.clustering + basic_storages = [ + c + for c in self._fs.components.values() + if isinstance(c, Storage) + and not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic')) + ] + effect_ids = list(self._fs.effects.keys()) + self._storages = StoragesData( + basic_storages, 'storage', effect_ids, timesteps_extra=self._fs.timesteps_extra + ) + return self._storages + + @property + def intercluster_storages(self) -> StoragesData: + """Get or create StoragesData for intercluster storages.""" + if self._intercluster_storages is None: + from .components import Storage + + clustering = self._fs.clustering + intercluster = [ + c + for c in self._fs.components.values() + if isinstance(c, Storage) + and clustering is not None + and c.cluster_mode in ('intercluster', 'intercluster_cyclic') + ] + effect_ids = list(self._fs.effects.keys()) + self._intercluster_storages = StoragesData(intercluster, 'intercluster_storage', effect_ids) + return self._intercluster_storages + + @property + def buses(self) -> BusesData: + """Get or create BusesData for all buses.""" + if self._buses is None: + self._buses = BusesData(list(self._fs.buses.values())) + return self._buses + + @property + def effects(self) -> EffectsData: + """Get or create EffectsData for all effects.""" + if self._effects is None: + self._effects = EffectsData(self._fs.effects) + return self._effects + + @property + def components(self) -> ComponentsData: + """Get or create ComponentsData for all components.""" + if self._components is None: + all_components = list(self._fs.components.values()) + components_with_status = [c for c in all_components if c.status_parameters is not None] + self._components = ComponentsData(components_with_status, all_components) + return self._components + + @property + def converters(self) -> ConvertersData: + """Get or create ConvertersData for all converters.""" + if self._converters is None: + from .components import LinearConverter + + converters = [c for c in self._fs.components.values() if isinstance(c, LinearConverter)] + self._converters = ConvertersData(converters) + return self._converters + + @property + def transmissions(self) -> TransmissionsData: + """Get or create TransmissionsData for all transmissions.""" + if self._transmissions is None: + from .components import Transmission + + transmissions = [c 
for c in self._fs.components.values() if isinstance(c, Transmission)] + self._transmissions = TransmissionsData(transmissions) + return self._transmissions + def _reset(self) -> None: - """Reset cached data (called when FlowSystem changes).""" + """Reset all cached data (called when FlowSystem is invalidated).""" self._flows = None + self._storages = None + self._intercluster_storages = None + self._buses = None + self._effects = None + self._components = None + self._converters = None + self._transmissions = None diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 893dbda6b..65c874bb4 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -1499,39 +1499,20 @@ def _run_plausibility_checks(self) -> None: """Run plausibility checks on all elements after data transformation. This runs both config validation (simple checks) and DataArray-based - validation through the *Data classes. - """ - from .batched import BusesData, ConvertersData, EffectsData, StoragesData, TransmissionsData - from .components import LinearConverter, Storage, Transmission - - # Batched validation for flows (includes config + DataArray checks) - self.batched.flows.validate() - - # Batched validation for buses - BusesData(list(self.buses.values())).validate() - - # Batched validation for effects - EffectsData(self.effects).validate() - - # Batched validation for storages - storages = [c for c in self.components.values() if isinstance(c, Storage)] - if storages: - StoragesData(storages, 'storage', list(self.effects.keys())).validate() - - # Batched validation for converters - converters = [c for c in self.components.values() if isinstance(c, LinearConverter)] - if converters: - ConvertersData(converters).validate() - - # Batched validation for transmissions - transmissions = [c for c in self.components.values() if isinstance(c, Transmission)] - if transmissions: - TransmissionsData(transmissions).validate() - - # Validate remaining components (those without specialized *Data classes) - for component in self.components.values(): - if not isinstance(component, (Storage, LinearConverter, Transmission)): - component.validate_config() + validation through the cached *Data classes in BatchedAccessor. + The same *Data instances are reused during model building. + """ + # Use cached *Data from BatchedAccessor + batched = self.batched + + # Batched validation for each element type + batched.flows.validate() + batched.buses.validate() + batched.effects.validate() + batched.storages.validate() + batched.converters.validate() + batched.transmissions.validate() + batched.components.validate() def _validate_system_integrity(self) -> None: """ diff --git a/flixopt/structure.py b/flixopt/structure.py index f01566569..91ac8657a 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -893,15 +893,7 @@ def build_model(self, timing: bool = False): Args: timing: If True, print detailed timing breakdown. 
""" - from .batched import ( - BusesData, - ComponentsData, - ConvertersData, - EffectsData, - StoragesData, - TransmissionsData, - ) - from .components import InterclusterStoragesModel, LinearConverter, Storage, StoragesModel, Transmission + from .components import InterclusterStoragesModel, StoragesModel from .effects import EffectsModel from .elements import ( BusesModel, @@ -913,65 +905,40 @@ def build_model(self, timing: bool = False): timer = _BuildTimer() if timing else None - self.effects = EffectsModel(self, EffectsData(self.flow_system.effects)) + # Use cached *Data from BatchedAccessor (same instances used for validation) + batched = self.flow_system.batched + + self.effects = EffectsModel(self, batched.effects) if timer: timer.record('effects') - self._flows_model = FlowsModel(self, self.flow_system.batched.flows) + self._flows_model = FlowsModel(self, batched.flows) if timer: timer.record('flows') - self._buses_model = BusesModel(self, BusesData(list(self.flow_system.buses.values())), self._flows_model) + self._buses_model = BusesModel(self, batched.buses, self._flows_model) if timer: timer.record('buses') - all_components = list(self.flow_system.components.values()) - effect_ids = list(self.flow_system.effects.keys()) - clustering = self.flow_system.clustering - - basic_storages = [ - c - for c in all_components - if isinstance(c, Storage) - and not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic')) - ] - self._storages_model = StoragesModel( - self, - StoragesData(basic_storages, 'storage', effect_ids, timesteps_extra=self.flow_system.timesteps_extra), - self._flows_model, - ) + self._storages_model = StoragesModel(self, batched.storages, self._flows_model) if timer: timer.record('storages') - intercluster_storages = [ - c - for c in all_components - if isinstance(c, Storage) - and clustering is not None - and c.cluster_mode in ('intercluster', 'intercluster_cyclic') - ] self._intercluster_storages_model = InterclusterStoragesModel( - self, - StoragesData(intercluster_storages, 'intercluster_storage', effect_ids), - self._flows_model, + self, batched.intercluster_storages, self._flows_model ) if timer: timer.record('intercluster_storages') - components_with_status = [c for c in all_components if c.status_parameters is not None] - self._components_model = ComponentsModel( - self, ComponentsData(components_with_status, all_components), self._flows_model - ) + self._components_model = ComponentsModel(self, batched.components, self._flows_model) if timer: timer.record('components') - converters = [c for c in all_components if isinstance(c, LinearConverter)] - self._converters_model = ConvertersModel(self, ConvertersData(converters), self._flows_model) + self._converters_model = ConvertersModel(self, batched.converters, self._flows_model) if timer: timer.record('converters') - transmissions = [c for c in all_components if isinstance(c, Transmission)] - self._transmissions_model = TransmissionsModel(self, TransmissionsData(transmissions), self._flows_model) + self._transmissions_model = TransmissionsModel(self, batched.transmissions, self._flows_model) if timer: timer.record('transmissions') From 48fc56111c9be62ca0e821254ef3c1530c5614d5 Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Tue, 3 Feb 2026 23:16:21 +0100 Subject: [PATCH 4/8] Temp --- flixopt/batched.py | 70 +++++++++++++----------------------------- flixopt/components.py | 11 ++++--- flixopt/effects.py | 8 +++-- flixopt/elements.py | 11 ++++--- 
flixopt/flow_system.py | 41 +++++++++++++++++-------- 5 files changed, 67 insertions(+), 74 deletions(-) diff --git a/flixopt/batched.py b/flixopt/batched.py index db04a846c..8e3e082a9 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -19,6 +19,7 @@ import pandas as pd import xarray as xr +from .core import PlausibilityError from .features import fast_isnull, fast_notnull, stack_along_dim from .interface import InvestParameters, StatusParameters from .modeling import _scalar_safe_isel_drop @@ -753,23 +754,19 @@ def charge_state_upper_bounds(self) -> xr.DataArray: # === Validation === def validate(self) -> None: - """Validate all storages after transformation. + """Validate all storages after transformation (DataArray checks only). - Performs batched validation checks that require DataArray operations. - Simple config checks are done in Storage.validate_config(). + Config validation (simple checks) is done separately via Storage.validate_config(). + This method only performs checks that require DataArray operations. Raises: PlausibilityError: If any validation check fails. """ - from .core import PlausibilityError from .modeling import _scalar_safe_isel errors: list[str] = [] for storage in self._storages: - # First run config validation - storage.validate_config() - sid = storage.label_full # Capacity required for non-default relative bounds (DataArray checks) @@ -1563,20 +1560,14 @@ def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None) # === Validation === def validate(self) -> None: - """Validate all flows after transformation. + """Validate all flows after transformation (DataArray checks only). - This method performs two types of validation: - 1. Config validation on each flow (simple checks, no DataArray operations) - 2. Batched validation (DataArray-based checks across all flows) + Config validation (simple checks) is done separately via Flow.validate_config(). + This method only performs checks that require DataArray operations. Raises: PlausibilityError: If any validation check fails. """ - from .core import PlausibilityError - - # First: run config validation on each flow - for flow in self.elements.values(): - flow.validate_config() errors: list[str] = [] @@ -1739,13 +1730,8 @@ def values(self): """Iterate over Effect objects.""" return self._effects - def validate(self) -> None: - """Validate all effects after transformation. - - Simple config checks are done in Effect.validate_config(). - """ - for effect in self._effects: - effect.validate_config() + # No validate() method needed - Effect has no DataArray checks. + # Config validation is done in FlowSystem._run_config_validation(). class BusesData: @@ -1774,14 +1760,12 @@ def imbalance_elements(self) -> list[Bus]: return [b for b in self._buses if b.allows_imbalance] def validate(self) -> None: - """Validate all buses after transformation. + """Validate all buses after transformation (DataArray checks only). - Performs validation checks including DataArray operations. - Simple config checks are done in Bus.validate_config(). + Config validation (simple checks) is done separately via Bus.validate_config(). + This method only performs checks that require DataArray operations. 
""" for bus in self._buses: - bus.validate_config() - # Warning: imbalance_penalty == 0 (DataArray check) if bus.imbalance_penalty_per_flow_hour is not None: zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0)) @@ -1811,13 +1795,8 @@ def dim_name(self) -> str: def all_components(self) -> list[Component]: return self._all_components - def validate(self) -> None: - """Validate all components after transformation. - - Simple config checks are done in Component.validate_config(). - """ - for component in self._all_components: - component.validate_config() + # No validate() method needed - Component has no DataArray checks. + # Config validation is done in FlowSystem._run_config_validation(). class ConvertersData: @@ -1845,13 +1824,8 @@ def with_piecewise(self) -> list[LinearConverter]: """Converters with piecewise_conversion.""" return [c for c in self._converters if c.piecewise_conversion] - def validate(self) -> None: - """Validate all converters after transformation. - - Simple config checks are done in LinearConverter.validate_config(). - """ - for converter in self._converters: - converter.validate_config() + # No validate() method needed - LinearConverter has no DataArray checks. + # Config validation is done in FlowSystem._run_config_validation(). class TransmissionsData: @@ -1880,18 +1854,18 @@ def balanced(self) -> list[Transmission]: return [t for t in self._transmissions if t.balanced] def validate(self) -> None: - """Validate all transmissions after transformation. + """Validate all transmissions after transformation (DataArray checks only). - Performs validation checks including DataArray operations. - Simple config checks are done in Transmission.validate_config(). + Config validation (simple checks) is done separately via Transmission.validate_config(). + This method only performs checks that require DataArray operations. Raises: - ValueError: If any validation check fails. + PlausibilityError: If any validation check fails. """ + errors: list[str] = [] for transmission in self._transmissions: - transmission.validate_config() tid = transmission.label_full # Balanced size compatibility (DataArray check) @@ -1909,7 +1883,7 @@ def validate(self) -> None: ) if errors: - raise ValueError('\n'.join(errors)) + raise PlausibilityError('\n'.join(errors)) class BatchedAccessor: diff --git a/flixopt/components.py b/flixopt/components.py index 1de5283b1..7437426e1 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -191,8 +191,9 @@ def link_to_flow_system(self, flow_system, prefix: str = '') -> None: self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion')) def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. """ super().validate_config() @@ -504,10 +505,10 @@ def transform_data(self) -> None: ) def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. - DataArray-based validation is done in StoragesData.validate(). 
""" super().validate_config() @@ -702,10 +703,10 @@ def __init__( self.balanced = balanced def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. - DataArray-based validation is done in TransmissionsData.validate(). """ super().validate_config() # Check buses consistency diff --git a/flixopt/effects.py b/flixopt/effects.py index fc8493e70..01be79705 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -291,8 +291,9 @@ def transform_data(self) -> None: ) def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. """ # Check that minimum_over_periods and maximum_over_periods require a period dimension @@ -820,9 +821,10 @@ def get_effect_label(eff: str | None) -> str: return {self.standard_effect.label: effect_values_user} def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate effect collection structure. - These are simple checks that don't require DataArray operations. + Called BEFORE transformation (in _prepare_effects) to ensure + effect share mappings are valid before any data transformation. """ # Check circular loops in effects: temporal, periodic = self.calculate_effect_share_factors() diff --git a/flixopt/elements.py b/flixopt/elements.py index c7283fe3e..77fa42857 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -238,8 +238,9 @@ def _check_unique_flow_labels(self, inputs: list = None, outputs: list = None): raise ValueError(f'Flow names must be unique! "{self.label_full}" got 2 or more of: {duplicates}') def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. """ self._check_unique_flow_labels() @@ -408,10 +409,10 @@ def transform_data(self) -> None: ) def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. - DataArray-based validation is done in BusesData.validate(). """ if len(self.inputs) == 0 and len(self.outputs) == 0: raise ValueError( @@ -676,10 +677,10 @@ def transform_data(self) -> None: self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario']) def validate_config(self) -> None: - """Validate configuration consistency (before transformation). + """Validate configuration consistency. + Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. - Checks that require transformed data are done in FlowsData.validate(). 
""" # Size is required when using StatusParameters (for big-M constraints) if self.status_parameters is not None and self.size is None: diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 65c874bb4..51ffab253 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -740,12 +740,17 @@ def connect_and_transform(self): # Note: status parameter propagation happens inside Component.transform_data() self._prepare_effects() + # Config validation BEFORE transformation (fail fast on config errors) + self._run_config_validation() + for element in chain(self.components.values(), self.effects.values(), self.buses.values()): element.transform_data() - # Validate cross-element references immediately after transformation + # Validate cross-element references after transformation self._validate_system_integrity() - self._run_plausibility_checks() + + # Data validation AFTER transformation (DataArray checks) + self._run_data_validation() self._connected_and_transformed = True @@ -1495,24 +1500,34 @@ def _prepare_effects(self) -> None: if penalty._flow_system is None: penalty.link_to_flow_system(self) - def _run_plausibility_checks(self) -> None: - """Run plausibility checks on all elements after data transformation. + def _run_config_validation(self) -> None: + """Run config validation on all elements BEFORE data transformation. - This runs both config validation (simple checks) and DataArray-based - validation through the cached *Data classes in BatchedAccessor. - The same *Data instances are reused during model building. + These are simple checks that don't require DataArray operations. + Called before transform_data() to fail fast on configuration errors. """ - # Use cached *Data from BatchedAccessor - batched = self.batched + for flow in self.flows.values(): + flow.validate_config() + for bus in self.buses.values(): + bus.validate_config() + for effect in self.effects.values(): + effect.validate_config() + for component in self.components.values(): + component.validate_config() + + def _run_data_validation(self) -> None: + """Run data validation on all elements AFTER data transformation. + + These are checks that require DataArray operations (e.g., comparing + bounds across time dimensions). Called after transform_data(). - # Batched validation for each element type + The cached *Data instances are reused during model building. + """ + batched = self.batched batched.flows.validate() batched.buses.validate() - batched.effects.validate() batched.storages.validate() - batched.converters.validate() batched.transmissions.validate() - batched.components.validate() def _validate_system_integrity(self) -> None: """ From 45c99c4269779a0c07ba6b476c8975b8d33be92e Mon Sep 17 00:00:00 2001 From: FBumann <117816358+FBumann@users.noreply.github.com> Date: Wed, 4 Feb 2026 08:35:29 +0100 Subject: [PATCH 5/8] Improve organization of validation --- flixopt/batched.py | 93 ++++++++++++++++++++++++++++------- flixopt/effects.py | 36 ++++---------- flixopt/flow_system.py | 70 +++++++++++++++----------- flixopt/transform_accessor.py | 2 + 4 files changed, 129 insertions(+), 72 deletions(-) diff --git a/flixopt/batched.py b/flixopt/batched.py index 8e3e082a9..461b38d50 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -754,10 +754,11 @@ def charge_state_upper_bounds(self) -> xr.DataArray: # === Validation === def validate(self) -> None: - """Validate all storages after transformation (DataArray checks only). + """Validate all storages (config + DataArray checks). 
From 45c99c4269779a0c07ba6b476c8975b8d33be92e Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Wed, 4 Feb 2026 08:35:29 +0100
Subject: [PATCH 5/8] Improve organization of validation

---
 flixopt/batched.py            | 93 ++++++++++++++++++++++++++++-------
 flixopt/effects.py            | 36 ++++----------
 flixopt/flow_system.py        | 70 +++++++++++++++++----------
 flixopt/transform_accessor.py |  2 +
 4 files changed, 129 insertions(+), 72 deletions(-)

diff --git a/flixopt/batched.py b/flixopt/batched.py
index 8e3e082a9..461b38d50 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -754,10 +754,11 @@ def charge_state_upper_bounds(self) -> xr.DataArray:
     # === Validation ===
 
     def validate(self) -> None:
-        """Validate all storages after transformation (DataArray checks only).
+        """Validate all storages (config + DataArray checks).
 
-        Config validation (simple checks) is done separately via Storage.validate_config().
-        This method only performs checks that require DataArray operations.
+        Performs both:
+        - Config validation via Storage.validate_config()
+        - DataArray validation (post-transformation checks)
 
         Raises:
             PlausibilityError: If any validation check fails.
@@ -767,6 +768,7 @@ def validate(self) -> None:
         errors: list[str] = []
 
         for storage in self._storages:
+            storage.validate_config()
             sid = storage.label_full
 
             # Capacity required for non-default relative bounds (DataArray checks)
@@ -1560,14 +1562,17 @@ def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None)
     # === Validation ===
 
     def validate(self) -> None:
-        """Validate all flows after transformation (DataArray checks only).
+        """Validate all flows (config + DataArray checks).
 
-        Config validation (simple checks) is done separately via Flow.validate_config().
-        This method only performs checks that require DataArray operations.
+        Performs both:
+        - Config validation via Flow.validate_config()
+        - DataArray validation (post-transformation checks)
 
         Raises:
             PlausibilityError: If any validation check fails.
         """
+        for flow in self.elements.values():
+            flow.validate_config()
 
         errors: list[str] = []
 
@@ -1730,8 +1735,44 @@ def values(self):
         """Iterate over Effect objects."""
         return self._effects
 
-    # No validate() method needed - Effect has no DataArray checks.
-    # Config validation is done in FlowSystem._run_config_validation().
+    def validate(self) -> None:
+        """Validate all effects and the effect collection structure.
+
+        Performs both:
+        - Individual effect config validation
+        - Collection-level validation (circular loops in share mappings, unknown effect refs)
+        """
+        for effect in self._effects:
+            effect.validate_config()
+
+        # Collection-level validation (share structure)
+        self._validate_share_structure()
+
+    def _validate_share_structure(self) -> None:
+        """Validate effect share mappings for cycles and unknown references."""
+        from .effects import detect_cycles, tuples_to_adjacency_list
+
+        temporal, periodic = self._collection.calculate_effect_share_factors()
+
+        # Validate all referenced effects exist
+        edges = list(temporal.keys()) + list(periodic.keys())
+        unknown_sources = {src for src, _ in edges if src not in self._collection}
+        unknown_targets = {tgt for _, tgt in edges if tgt not in self._collection}
+        unknown = unknown_sources | unknown_targets
+        if unknown:
+            raise KeyError(f'Unknown effects used in effect share mappings: {sorted(unknown)}')
+
+        # Check for circular dependencies
+        temporal_cycles = detect_cycles(tuples_to_adjacency_list([key for key in temporal]))
+        periodic_cycles = detect_cycles(tuples_to_adjacency_list([key for key in periodic]))
+
+        if temporal_cycles:
+            cycle_str = '\n'.join([' -> '.join(cycle) for cycle in temporal_cycles])
+            raise ValueError(f'Error: circular temporal-shares detected:\n{cycle_str}')
+
+        if periodic_cycles:
+            cycle_str = '\n'.join([' -> '.join(cycle) for cycle in periodic_cycles])
+            raise ValueError(f'Error: circular periodic-shares detected:\n{cycle_str}')
 
 
 class BusesData:
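detect_cycles and tuples_to_adjacency_list are imported from flixopt.effects and are not shown in this diff; the check amounts to standard cycle detection on the directed graph of share edges. Roughly equivalent logic, assuming edges are (source, target) tuples and a cycle is reported as the node path along it:

    def tuples_to_adjacency_list(edges):
        adjacency = {}
        for source, target in edges:
            adjacency.setdefault(source, []).append(target)
        return adjacency

    def detect_cycles(adjacency):
        cycles = []

        def visit(node, path):
            if node in path:  # back-edge: we walked into our own path
                cycles.append(path[path.index(node):] + [node])
                return
            for neighbor in adjacency.get(node, []):
                visit(neighbor, path + [node])

        for start in adjacency:
            visit(start, [])
        return cycles

    # 'costs -> CO2 -> costs' is circular and must be rejected:
    assert detect_cycles(tuples_to_adjacency_list([('costs', 'CO2'), ('CO2', 'costs')]))

A naive DFS like this is exponential on pathological graphs, but an effect collection holds only a handful of nodes, so clarity wins over asymptotics here.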
@@ -1760,12 +1801,14 @@ def imbalance_elements(self) -> list[Bus]:
         return [b for b in self._buses if b.allows_imbalance]
 
     def validate(self) -> None:
-        """Validate all buses after transformation (DataArray checks only).
+        """Validate all buses (config + DataArray checks).
 
-        Config validation (simple checks) is done separately via Bus.validate_config().
-        This method only performs checks that require DataArray operations.
+        Performs both:
+        - Config validation via Bus.validate_config()
+        - DataArray validation (post-transformation checks)
         """
         for bus in self._buses:
+            bus.validate_config()
             # Warning: imbalance_penalty == 0 (DataArray check)
             if bus.imbalance_penalty_per_flow_hour is not None:
                 zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0))
@@ -1795,8 +1838,17 @@ def dim_name(self) -> str:
     def all_components(self) -> list[Component]:
         return self._all_components
 
-    # No validate() method needed - Component has no DataArray checks.
-    # Config validation is done in FlowSystem._run_config_validation().
+    def validate(self) -> None:
+        """Validate generic components (config checks only).
+
+        Note: Storage, Transmission, and LinearConverter are validated
+        through their specialized *Data classes, so we skip them here.
+        """
+        from .components import LinearConverter, Storage, Transmission
+
+        for component in self._all_components:
+            if not isinstance(component, (Storage, LinearConverter, Transmission)):
+                component.validate_config()
 
 
 class ConvertersData:
@@ -1824,8 +1876,10 @@ def with_piecewise(self) -> list[LinearConverter]:
         """Converters with piecewise_conversion."""
         return [c for c in self._converters if c.piecewise_conversion]
 
-    # No validate() method needed - LinearConverter has no DataArray checks.
-    # Config validation is done in FlowSystem._run_config_validation().
+    def validate(self) -> None:
+        """Validate all converters (config checks, no DataArray operations needed)."""
+        for converter in self._converters:
+            converter.validate_config()
 
 
 class TransmissionsData:
@@ -1854,14 +1908,17 @@ def balanced(self) -> list[Transmission]:
         return [t for t in self._transmissions if t.balanced]
 
     def validate(self) -> None:
-        """Validate all transmissions after transformation (DataArray checks only).
+        """Validate all transmissions (config + DataArray checks).
 
-        Config validation (simple checks) is done separately via Transmission.validate_config().
-        This method only performs checks that require DataArray operations.
+        Performs both:
+        - Config validation via Transmission.validate_config()
+        - DataArray validation (post-transformation checks)
 
         Raises:
             PlausibilityError: If any validation check fails.
         """
+        for transmission in self._transmissions:
+            transmission.validate_config()
 
         errors: list[str] = []
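One detail shared by the FlowsData and TransmissionsData methods above: checks append to an errors list and a single PlausibilityError is raised at the end, so a user sees every problem in one run instead of fixing them one re-run at a time. The shape of that pattern, with a stand-in exception class and made-up flow configs:

    class PlausibilityError(ValueError):
        """Stand-in for flixopt's PlausibilityError."""

    def validate(flows: dict) -> None:
        errors = []
        for name, cfg in flows.items():
            if cfg.get('relative_minimum', 0) > cfg.get('relative_maximum', 1):
                errors.append(f'{name}: relative_minimum > relative_maximum')
            if cfg.get('relative_minimum', 0) > 0 and cfg.get('size') is None:
                errors.append(f'{name}: relative_minimum > 0 requires a size')
        if errors:
            raise PlausibilityError('\n'.join(errors))  # all failures at once

    try:
        validate({
            'boiler(gas)': {'relative_minimum': 0.3},
            'chp(power)': {'relative_minimum': 0.9, 'relative_maximum': 0.5, 'size': 50},
        })
    except PlausibilityError as err:
        print(err)  # reports both flows in a single exception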
""" - # Check circular loops in effects: - temporal, periodic = self.calculate_effect_share_factors() - - # Validate all referenced effects (both sources and targets) exist - edges = list(temporal.keys()) + list(periodic.keys()) - unknown_sources = {src for src, _ in edges if src not in self} - unknown_targets = {tgt for _, tgt in edges if tgt not in self} - unknown = unknown_sources | unknown_targets - if unknown: - raise KeyError(f'Unknown effects used in effect share mappings: {sorted(unknown)}') - - temporal_cycles = detect_cycles(tuples_to_adjacency_list([key for key in temporal])) - periodic_cycles = detect_cycles(tuples_to_adjacency_list([key for key in periodic])) - - if temporal_cycles: - cycle_str = '\n'.join([' -> '.join(cycle) for cycle in temporal_cycles]) - raise ValueError(f'Error: circular temporal-shares detected:\n{cycle_str}') - - if periodic_cycles: - cycle_str = '\n'.join([' -> '.join(cycle) for cycle in periodic_cycles]) - raise ValueError(f'Error: circular periodic-shares detected:\n{cycle_str}') + pass def _plausibility_checks(self) -> None: - """Legacy validation method - delegates to validate_config().""" - self.validate_config() + """Deprecated: Legacy validation method. + + Kept for backwards compatibility but does nothing. + Validation is now handled by EffectsData.validate(). + """ + pass def __getitem__(self, effect: str | Effect | None) -> Effect: """ diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 51ffab253..065c933a5 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -228,7 +228,7 @@ def __init__( self._solution: xr.Dataset | None = None # Aggregation info - populated by transform.cluster() - self.clustering: Clustering | None = None + self._clustering: Clustering | None = None # Statistics accessor cache - lazily initialized, invalidated on new solution self._statistics: StatisticsAccessor | None = None @@ -740,17 +740,14 @@ def connect_and_transform(self): # Note: status parameter propagation happens inside Component.transform_data() self._prepare_effects() - # Config validation BEFORE transformation (fail fast on config errors) - self._run_config_validation() - for element in chain(self.components.values(), self.effects.values(), self.buses.values()): element.transform_data() # Validate cross-element references after transformation self._validate_system_integrity() - # Data validation AFTER transformation (DataArray checks) - self._run_data_validation() + # Unified validation AFTER transformation (config + DataArray checks) + self._run_validation() self._connected_and_transformed = True @@ -1396,6 +1393,32 @@ def batched(self) -> BatchedAccessor: self._batched = BatchedAccessor(self) return self._batched + @property + def clustering(self) -> Clustering | None: + """Clustering metadata for this FlowSystem. + + This property is populated by `transform.cluster()` or when loading + a clustered FlowSystem from file. It contains information about the + original timesteps, cluster assignments, and aggregation metrics. + + Setting this property resets the batched accessor cache to ensure + storage categorization (basic vs intercluster) is correctly computed + based on the new clustering state. + + Returns: + Clustering object if this is a clustered FlowSystem, None otherwise. 
+ """ + return self._clustering + + @clustering.setter + def clustering(self, value: Clustering | None) -> None: + """Set clustering and reset batched accessor cache.""" + self._clustering = value + # Reset batched accessor so storage categorization is recomputed + # with the new clustering state (basic vs intercluster storages) + if self._batched is not None: + self._batched._reset() + def plot_network( self, path: bool | str | pathlib.Path = 'flow_system.html', @@ -1490,44 +1513,35 @@ def _check_if_element_already_assigned(self, element: Element) -> None: ) def _prepare_effects(self) -> None: - """Validate effect collection and create the penalty effect if needed. + """Create the penalty effect if needed. Called before transform_data() so the penalty effect gets transformed. + Validation is done after transformation via _run_validation(). """ - self.effects._plausibility_checks() if self.effects._penalty_effect is None: penalty = self.effects._create_penalty_effect() if penalty._flow_system is None: penalty.link_to_flow_system(self) - def _run_config_validation(self) -> None: - """Run config validation on all elements BEFORE data transformation. - - These are simple checks that don't require DataArray operations. - Called before transform_data() to fail fast on configuration errors. - """ - for flow in self.flows.values(): - flow.validate_config() - for bus in self.buses.values(): - bus.validate_config() - for effect in self.effects.values(): - effect.validate_config() - for component in self.components.values(): - component.validate_config() - - def _run_data_validation(self) -> None: - """Run data validation on all elements AFTER data transformation. + def _run_validation(self) -> None: + """Run all validation through batched *Data classes. - These are checks that require DataArray operations (e.g., comparing - bounds across time dimensions). Called after transform_data(). + Each *Data.validate() method handles both: + - Config validation (simple checks) + - DataArray validation (post-transformation checks) - The cached *Data instances are reused during model building. + Called after transform_data(). The cached *Data instances are + reused during model building. """ batched = self.batched + batched.effects.validate() batched.flows.validate() batched.buses.validate() batched.storages.validate() + batched.intercluster_storages.validate() + batched.converters.validate() batched.transmissions.validate() + batched.components.validate() def _validate_system_integrity(self) -> None: """ diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py index 1123b9f66..39695e90c 100644 --- a/flixopt/transform_accessor.py +++ b/flixopt/transform_accessor.py @@ -364,6 +364,8 @@ def build(self, ds: xr.Dataset) -> FlowSystem: storage.initial_charge_state = None # Create Clustering object with full AggregationResult access + # Note: The clustering setter automatically resets the batched accessor + # to ensure storage categorization (basic vs intercluster) is recomputed. 
diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py
index 1123b9f66..39695e90c 100644
--- a/flixopt/transform_accessor.py
+++ b/flixopt/transform_accessor.py
@@ -364,6 +364,8 @@ def build(self, ds: xr.Dataset) -> FlowSystem:
                 storage.initial_charge_state = None
 
         # Create Clustering object with full AggregationResult access
+        # Note: The clustering setter automatically resets the batched accessor
+        # to ensure storage categorization (basic vs intercluster) is recomputed.
         reduced_fs.clustering = Clustering(
             original_timesteps=self._fs.timesteps,
             original_data=drop_constant_arrays(ds, dim='time'),

From a5157a27ac00c67e7f3c97d7af2301309cd92398 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Wed, 4 Feb 2026 09:28:28 +0100
Subject: [PATCH 6/8] Fix cryptic crash when a Bus has no connected flows
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

When a Bus has no flows connected, FlowsData.validate() crashed with a
cryptic TypeError instead of raising a clear ValueError message.

Root cause: in _run_validation(), the validation order was

1. batched.flows.validate()  ← crashed on empty DataArray operations
2. batched.buses.validate()  ← would have caught the error
---
 flixopt/batched.py     | 4 ++++
 flixopt/flow_system.py | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/flixopt/batched.py b/flixopt/batched.py
index 461b38d50..237b53cbe 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -1571,6 +1571,10 @@ def validate(self) -> None:
         Raises:
             PlausibilityError: If any validation check fails.
         """
+        # Early return if no flows (avoids empty DataArray operations)
+        if not self.elements:
+            return
+
         for flow in self.elements.values():
             flow.validate_config()
 
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index 065c933a5..73294f5e7 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -1534,9 +1534,10 @@ def _run_validation(self) -> None:
         reused during model building.
         """
         batched = self.batched
+        # Validate buses first - catches "Bus with no flows" before FlowsData fails on empty arrays
+        batched.buses.validate()
         batched.effects.validate()
         batched.flows.validate()
-        batched.buses.validate()
         batched.storages.validate()
         batched.intercluster_storages.validate()
         batched.converters.validate()
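The "cryptic TypeError" failure mode is easy to reproduce in spirit: array libraries generally cannot build or reduce a batch from zero members, and the resulting exception says nothing about buses. One minimal illustration of the same class of problem (the exact exception inside flixopt differed, but the shape is the same):

    import xarray as xr

    flows: list[xr.DataArray] = []  # a system whose only bus has no flows

    try:
        xr.concat(flows, dim='flow')  # building the batched array
    except ValueError as err:
        print(f'low-level error, no mention of any Bus: {err}')

The two-part fix mirrors that: FlowsData.validate() returns early when there is nothing to stack, and BusesData.validate() runs first so the offending bus is reported by name.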
From 2c8792389c64a1769ef514053b83de017f0e0caf Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Thu, 5 Feb 2026 09:14:43 +0100
Subject: [PATCH 7/8] Add warning for relative_minimum=0 with status

---
 flixopt/batched.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/flixopt/batched.py b/flixopt/batched.py
index 326b5f2e0..f89bb767e 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -1628,6 +1628,16 @@ def validate(self) -> None:
                 f'Consider using status_parameters to allow switching active and inactive.'
             )
 
+        # Warning: status_parameters with relative_minimum=0 allows status=1 with flow=0
+        has_zero_min_with_status = ~has_nonzero_min & self.has_status
+        if has_zero_min_with_status.any():
+            warn_flows = [fid for fid, warn in zip(self.ids, has_zero_min_with_status.values, strict=False) if warn]
+            logger.warning(
+                f'Flows {warn_flows} have status_parameters but relative_minimum=0. '
+                f'This allows status=1 with flow=0, which may lead to unexpected behavior. '
+                f'Consider setting relative_minimum > 0 to ensure the unit produces when active.'
+            )
+
         if errors:
             raise PlausibilityError('\n'.join(errors))
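The warning concerns the interaction of the status big-M bounds, roughly size * relative_minimum * status <= flow_rate <= size * relative_maximum * status. With relative_minimum = 0 the lower bound is 0 even while status = 1, so "active" no longer implies "producing", and anything the model prices per active hour still accrues. Worked through for a 100-unit flow:

    size, rel_min, rel_max = 100.0, 0.0, 1.0

    for status in (0, 1):
        lower = size * rel_min * status
        upper = size * rel_max * status
        print(f'status={status}: {lower} <= flow_rate <= {upper}')
    # status=1 still admits flow_rate = 0, which is what the warning flags.
    # With rel_min = 0.2, status=1 would force flow_rate >= 20 instead.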
From 53ea1551876fe0ce3ceff87b417a570f47419e02 Mon Sep 17 00:00:00 2001
From: FBumann <117816358+FBumann@users.noreply.github.com>
Date: Thu, 5 Feb 2026 09:39:22 +0100
Subject: [PATCH 8/8] Improve validation by using helpers

---
 flixopt/batched.py      | 47 +++++++++++++++++++++--------------------
 flixopt/structure.py    | 16 +++++++-------
 tests/test_component.py |  8 ++-----
 tests/test_flow.py      | 22 ++++++++++---------
 4 files changed, 46 insertions(+), 47 deletions(-)

diff --git a/flixopt/batched.py b/flixopt/batched.py
index f89bb767e..35193b494 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -1571,6 +1571,15 @@ def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None)
 
     # === Validation ===
 
+    def _any_per_flow(self, arr: xr.DataArray) -> xr.DataArray:
+        """Reduce to (flow,) by collapsing all non-flow dims with .any()."""
+        non_flow_dims = [d for d in arr.dims if d != self.dim_name]
+        return arr.any(dim=non_flow_dims) if non_flow_dims else arr
+
+    def _flagged_ids(self, mask: xr.DataArray) -> list[str]:
+        """Return flow IDs where mask is True."""
+        return [fid for fid, flag in zip(self.ids, mask.values, strict=False) if flag]
+
     def validate(self) -> None:
         """Validate all flows (config + DataArray checks).
 
@@ -1581,7 +1590,6 @@ def validate(self) -> None:
         Raises:
             PlausibilityError: If any validation check fails.
         """
-        # Early return if no flows (avoids empty DataArray operations)
         if not self.elements:
             return
 
@@ -1591,51 +1599,44 @@ def validate(self) -> None:
         errors: list[str] = []
 
         # Batched checks: relative_minimum <= relative_maximum
-        invalid_bounds = (self.relative_minimum > self.relative_maximum).any(
-            dim=[d for d in self.relative_minimum.dims if d != 'flow']
-        )
+        invalid_bounds = self._any_per_flow(self.relative_minimum > self.relative_maximum)
         if invalid_bounds.any():
-            bad_flows = [fid for fid, bad in zip(self.ids, invalid_bounds.values, strict=False) if bad]
-            errors.append(f'relative_minimum > relative_maximum for flows: {bad_flows}')
+            errors.append(f'relative_minimum > relative_maximum for flows: {self._flagged_ids(invalid_bounds)}')
 
         # Check: size required when relative_minimum > 0
-        has_nonzero_min = (self.relative_minimum > 0).any(dim=[d for d in self.relative_minimum.dims if d != 'flow'])
-        needs_size_for_min = has_nonzero_min & ~self.has_size
-        if needs_size_for_min.any():
-            bad_flows = [fid for fid, bad in zip(self.ids, needs_size_for_min.values, strict=False) if bad]
+        has_nonzero_min = self._any_per_flow(self.relative_minimum > 0)
+        if (has_nonzero_min & ~self.has_size).any():
             errors.append(
-                f'relative_minimum > 0 but no size defined for flows: {bad_flows}. '
+                f'relative_minimum > 0 but no size defined for flows: '
+                f'{self._flagged_ids(has_nonzero_min & ~self.has_size)}. '
                 f'A size is required because the lower bound is size * relative_minimum.'
             )
 
         # Check: size required when relative_maximum < 1
-        has_nondefault_max = (self.relative_maximum < 1).any(dim=[d for d in self.relative_maximum.dims if d != 'flow'])
-        needs_size_for_max = has_nondefault_max & ~self.has_size
-        if needs_size_for_max.any():
-            bad_flows = [fid for fid, bad in zip(self.ids, needs_size_for_max.values, strict=False) if bad]
+        has_nondefault_max = self._any_per_flow(self.relative_maximum < 1)
+        if (has_nondefault_max & ~self.has_size).any():
             errors.append(
-                f'relative_maximum < 1 but no size defined for flows: {bad_flows}. '
+                f'relative_maximum < 1 but no size defined for flows: '
+                f'{self._flagged_ids(has_nondefault_max & ~self.has_size)}. '
                 f'A size is required because the upper bound is size * relative_maximum.'
             )
 
         # Warning: relative_minimum > 0 without status_parameters prevents switching inactive
         has_nonzero_min_no_status = has_nonzero_min & ~self.has_status
         if has_nonzero_min_no_status.any():
-            warn_flows = [fid for fid, warn in zip(self.ids, has_nonzero_min_no_status.values, strict=False) if warn]
             logger.warning(
-                f'Flows {warn_flows} have relative_minimum > 0 and no status_parameters. '
-                f'This prevents the flow from switching inactive (flow_rate = 0). '
+                f'Flows {self._flagged_ids(has_nonzero_min_no_status)} have relative_minimum > 0 '
+                f'and no status_parameters. This prevents the flow from switching inactive (flow_rate = 0). '
                 f'Consider using status_parameters to allow switching active and inactive.'
             )
 
         # Warning: status_parameters with relative_minimum=0 allows status=1 with flow=0
         has_zero_min_with_status = ~has_nonzero_min & self.has_status
         if has_zero_min_with_status.any():
-            warn_flows = [fid for fid, warn in zip(self.ids, has_zero_min_with_status.values, strict=False) if warn]
             logger.warning(
-                f'Flows {warn_flows} have status_parameters but relative_minimum=0. '
-                f'This allows status=1 with flow=0, which may lead to unexpected behavior. '
-                f'Consider setting relative_minimum > 0 to ensure the unit produces when active.'
+                f'Flows {self._flagged_ids(has_zero_min_with_status)} have status_parameters but '
+                f'relative_minimum=0. This allows status=1 with flow=0, which may lead to unexpected '
+                f'behavior. Consider setting relative_minimum > 0 to ensure the unit produces when active.'
             )
 
         if errors:
             raise PlausibilityError('\n'.join(errors))
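The two helpers make every check read as "reduce per flow, then map the mask to ids". Their behavior on a toy array - standalone equivalents of the methods, with the dimension name 'flow' and the ids list assumed:

    import xarray as xr

    ids = ['boiler(gas)', 'chp(power)']
    rel_min = xr.DataArray(
        [[0.0, 0.0, 0.0], [0.2, 0.0, 0.0]],
        dims=('flow', 'time'),
        coords={'flow': ids},
    )

    def any_per_flow(arr: xr.DataArray) -> xr.DataArray:
        non_flow_dims = [d for d in arr.dims if d != 'flow']
        return arr.any(dim=non_flow_dims) if non_flow_dims else arr

    def flagged_ids(mask: xr.DataArray) -> list:
        return [fid for fid, flag in zip(ids, mask.values, strict=False) if flag]

    has_nonzero_min = any_per_flow(rel_min > 0)  # (flow,) -> [False, True]
    print(flagged_ids(has_nonzero_min))          # ['chp(power)']

A side benefit of _any_per_flow over the old inline reduction: it uses self.dim_name instead of the hard-coded string 'flow', and it tolerates arrays that carry only the flow dimension.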
diff --git a/flixopt/structure.py b/flixopt/structure.py
index 91ac8657a..db6b45dff 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -180,15 +180,15 @@ class _FlowConstraint:
     UPTIME_UB = f'{FlowVarName.UPTIME}|ub'
     UPTIME_FORWARD = f'{FlowVarName.UPTIME}|forward'
     UPTIME_BACKWARD = f'{FlowVarName.UPTIME}|backward'
-    UPTIME_INITIAL_UB = f'{FlowVarName.UPTIME}|initial_ub'
-    UPTIME_INITIAL_LB = f'{FlowVarName.UPTIME}|initial_lb'
+    UPTIME_INITIAL = f'{FlowVarName.UPTIME}|initial'
+    UPTIME_INITIAL_CONTINUATION = f'{FlowVarName.UPTIME}|initial_continuation'
 
     # Downtime tracking constraints (built from variable name)
     DOWNTIME_UB = f'{FlowVarName.DOWNTIME}|ub'
     DOWNTIME_FORWARD = f'{FlowVarName.DOWNTIME}|forward'
     DOWNTIME_BACKWARD = f'{FlowVarName.DOWNTIME}|backward'
-    DOWNTIME_INITIAL_UB = f'{FlowVarName.DOWNTIME}|initial_ub'
-    DOWNTIME_INITIAL_LB = f'{FlowVarName.DOWNTIME}|initial_lb'
+    DOWNTIME_INITIAL = f'{FlowVarName.DOWNTIME}|initial'
+    DOWNTIME_INITIAL_CONTINUATION = f'{FlowVarName.DOWNTIME}|initial_continuation'
 
 
 FlowVarName.Constraint = _FlowConstraint
@@ -233,15 +233,15 @@ class _ComponentConstraint:
     UPTIME_UB = f'{ComponentVarName.UPTIME}|ub'
     UPTIME_FORWARD = f'{ComponentVarName.UPTIME}|forward'
     UPTIME_BACKWARD = f'{ComponentVarName.UPTIME}|backward'
-    UPTIME_INITIAL_UB = f'{ComponentVarName.UPTIME}|initial_ub'
-    UPTIME_INITIAL_LB = f'{ComponentVarName.UPTIME}|initial_lb'
+    UPTIME_INITIAL = f'{ComponentVarName.UPTIME}|initial'
+    UPTIME_INITIAL_CONTINUATION = f'{ComponentVarName.UPTIME}|initial_continuation'
 
     # Downtime tracking constraints
     DOWNTIME_UB = f'{ComponentVarName.DOWNTIME}|ub'
     DOWNTIME_FORWARD = f'{ComponentVarName.DOWNTIME}|forward'
     DOWNTIME_BACKWARD = f'{ComponentVarName.DOWNTIME}|backward'
-    DOWNTIME_INITIAL_UB = f'{ComponentVarName.DOWNTIME}|initial_ub'
-    DOWNTIME_INITIAL_LB = f'{ComponentVarName.DOWNTIME}|initial_lb'
+    DOWNTIME_INITIAL = f'{ComponentVarName.DOWNTIME}|initial'
+    DOWNTIME_INITIAL_CONTINUATION = f'{ComponentVarName.DOWNTIME}|initial_continuation'
 
 
 ComponentVarName.Constraint = _ComponentConstraint
diff --git a/tests/test_component.py b/tests/test_component.py
index 12a726a94..77b2cd471 100644
--- a/tests/test_component.py
+++ b/tests/test_component.py
@@ -274,12 +274,8 @@ def test_previous_states_with_multiple_flows_parameterized(
             x is not None for x in [in1_previous_flow_rate, out1_previous_flow_rate, out2_previous_flow_rate]
         )
         if has_previous:
-            # Check that uptime initial constraints exist in the model (batched naming)
-            # Note: component uptime constraints use |initial_lb and |initial_ub naming
-            has_uptime_constraint = (
-                'component|uptime|initial_lb' in model.constraints or 'component|uptime|initial_ub' in model.constraints
-            )
-            assert has_uptime_constraint, 'Uptime initial constraint should exist'
+            # Check that uptime initial constraint exists in the model (batched naming)
+            assert 'component|uptime|initial' in model.constraints, 'Uptime initial constraint should exist'
         else:
             # When no previous flow rate, no uptime initialization needed
             pass
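The renames track a semantic change that the test updates on both sides of this hunk make explicit: the first timestep used to be bracketed by two separate inequalities (initial_lb / initial_ub) and is now pinned by a single initial equality, duration[0] = (previous_duration + dt[0]) * state[0], with initial_continuation presumably carrying a previous run's duration forward. The recurrence evaluated by hand for the downtime case used in the tests (dt = 1 h, previous downtime = 1 h):

    def initial_duration(previous_duration: float, dt0: float, state0: int) -> float:
        # duration[0] = (previous_duration + dt[0]) * state[0]
        return (previous_duration + dt0) * state0

    # Inactive in the first hour: the counter continues, 1 h before + 1 h now.
    assert initial_duration(previous_duration=1.0, dt0=1.0, state0=1) == 2.0
    # Active in the first hour: the downtime counter resets to zero.
    assert initial_duration(previous_duration=1.0, dt0=1.0, state0=0) == 0.0

This is exactly the factor (1 + 1) that appears in the downtime assertion below.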
diff --git a/tests/test_flow.py b/tests/test_flow.py
index 2feabf39e..c5a91bcca 100644
--- a/tests/test_flow.py
+++ b/tests/test_flow.py
@@ -502,7 +502,7 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf
         assert 'flow|uptime|ub' in model.constraints
         assert 'flow|uptime|forward' in model.constraints
         assert 'flow|uptime|backward' in model.constraints
-        assert 'flow|uptime|initial_ub' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
         # Get individual flow variables
         uptime = model.variables['flow|uptime'].sel(flow=flow_label, drop=True)
@@ -532,9 +532,10 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf
             + (status.isel(time=slice(1, None)) - 1) * mega,
         )
 
+        # duration[0] = (previous_duration + dt[0]) * state[0]; previous_uptime=0
         assert_conequal(
-            model.constraints['flow|uptime|initial_ub'].sel(flow=flow_label, drop=True),
-            uptime.isel(time=0) <= status.isel(time=0) * model.timestep_duration.isel(time=0),
+            model.constraints['flow|uptime|initial'].sel(flow=flow_label, drop=True),
+            uptime.isel(time=0) == status.isel(time=0) * model.timestep_duration.isel(time=0),
         )
 
     def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
@@ -564,7 +565,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_co
         assert 'flow|uptime|ub' in model.constraints
         assert 'flow|uptime|forward' in model.constraints
         assert 'flow|uptime|backward' in model.constraints
-        assert 'flow|uptime|initial_lb' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
         # Get individual flow variables
         uptime = model.variables['flow|uptime'].sel(flow=flow_label, drop=True)
@@ -595,7 +596,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_co
         )
 
         # Check that initial constraint exists (with previous uptime incorporated)
-        assert 'flow|uptime|initial_lb' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
     def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_config):
         """Test flow with minimum and maximum consecutive inactive hours."""
@@ -624,7 +625,7 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_con
         assert 'flow|downtime|ub' in model.constraints
         assert 'flow|downtime|forward' in model.constraints
         assert 'flow|downtime|backward' in model.constraints
-        assert 'flow|downtime|initial_ub' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
         # Get individual flow variables
         downtime = model.variables['flow|downtime'].sel(flow=flow_label, drop=True)
@@ -656,9 +657,10 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_con
             + (inactive.isel(time=slice(1, None)) - 1) * mega,
         )
 
+        # duration[0] = (previous_duration + dt[0]) * state[0]; previous_downtime=1h
         assert_conequal(
-            model.constraints['flow|downtime|initial_ub'].sel(flow=flow_label, drop=True),
-            downtime.isel(time=0) <= inactive.isel(time=0) * (model.timestep_duration.isel(time=0) * (1 + 1)),
+            model.constraints['flow|downtime|initial'].sel(flow=flow_label, drop=True),
+            downtime.isel(time=0) == inactive.isel(time=0) * (model.timestep_duration.isel(time=0) * (1 + 1)),
         )
 
     def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
@@ -688,7 +690,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_c
         assert 'flow|downtime|ub' in model.constraints
         assert 'flow|downtime|forward' in model.constraints
         assert 'flow|downtime|backward' in model.constraints
-        assert 'flow|downtime|initial_lb' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
         # Get individual flow variables
         downtime = model.variables['flow|downtime'].sel(flow=flow_label, drop=True)
@@ -719,7 +721,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_c
         )
 
         # Check that initial constraint exists (with previous downtime incorporated)
-        assert 'flow|downtime|initial_lb' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
     def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_config):
         """Test flow with constraints on the number of startups."""
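A closing note on the test idiom used throughout this file: variables and constraints are built batched over a flow dimension, and each assertion slices a single flow out with .sel(flow=..., drop=True) before comparing it to a hand-built expression. The same shape of comparison in plain xarray (assert_conequal itself is a flixopt test helper for linopy constraints; the flow labels here are made up):

    import xarray as xr

    # A batched quantity with a flow dimension, as the model builds it ...
    batched = xr.DataArray(
        [[1.0, 2.0], [3.0, 4.0]],
        dims=('flow', 'time'),
        coords={'flow': ['Comp(Q_th)', 'Comp(Q_el)']},
    )

    # ... and the per-flow view a test asserts against:
    single = batched.sel(flow='Comp(Q_th)', drop=True)  # drop=True removes the flow coord
    expected = xr.DataArray([1.0, 2.0], dims=('time',))
    xr.testing.assert_equal(single, expected)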