diff --git a/flixopt/batched.py b/flixopt/batched.py
index 75ec630ce..35193b494 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -11,6 +11,7 @@
 from __future__ import annotations
 
+import logging
 from functools import cached_property
 from typing import TYPE_CHECKING
 
@@ -18,6 +19,7 @@
 import pandas as pd
 import xarray as xr
 
+from .core import PlausibilityError
 from .features import fast_isnull, fast_notnull, stack_along_dim
 from .interface import InvestParameters, StatusParameters
 from .modeling import _scalar_safe_isel_drop
@@ -29,6 +31,8 @@
     from .elements import Bus, Component, Flow
     from .flow_system import FlowSystem
 
+logger = logging.getLogger('flixopt')
+
 
 def build_effects_array(
     effect_dicts: dict[str, dict[str, float | xr.DataArray]],
@@ -757,6 +761,85 @@ def charge_state_upper_bounds(self) -> xr.DataArray:
         """(storage, time_extra) - absolute upper bounds = relative_max * capacity_upper."""
         return self.relative_maximum_charge_state_extra * self.capacity_upper
 
+    # === Validation ===
+
+    def validate(self) -> None:
+        """Validate all storages (config + DataArray checks).
+
+        Performs both:
+        - Config validation via Storage.validate_config()
+        - DataArray validation (post-transformation checks)
+
+        Raises:
+            PlausibilityError: If any validation check fails.
+        """
+        from .modeling import _scalar_safe_isel
+
+        errors: list[str] = []
+
+        for storage in self._storages:
+            storage.validate_config()
+            sid = storage.label_full
+
+            # Capacity required for non-default relative bounds (DataArray checks)
+            if storage.capacity_in_flow_hours is None:
+                if np.any(storage.relative_minimum_charge_state > 0):
+                    errors.append(
+                        f'Storage "{sid}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. '
+                        f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.'
+                    )
+                if np.any(storage.relative_maximum_charge_state < 1):
+                    errors.append(
+                        f'Storage "{sid}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. '
+                        f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.'
+                    )
+
+            # Initial charge state vs capacity bounds (DataArray checks)
+            if storage.capacity_in_flow_hours is not None:
+                if isinstance(storage.capacity_in_flow_hours, InvestParameters):
+                    minimum_capacity = storage.capacity_in_flow_hours.minimum_or_fixed_size
+                    maximum_capacity = storage.capacity_in_flow_hours.maximum_or_fixed_size
+                else:
+                    maximum_capacity = storage.capacity_in_flow_hours
+                    minimum_capacity = storage.capacity_in_flow_hours
+
+                min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(
+                    storage.relative_minimum_charge_state, {'time': 0}
+                )
+                max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(
+                    storage.relative_maximum_charge_state, {'time': 0}
+                )
+
+                initial_equals_final = isinstance(storage.initial_charge_state, str)
+                if not initial_equals_final and storage.initial_charge_state is not None:
+                    if (storage.initial_charge_state > max_initial_at_min_capacity).any():
+                        errors.append(
+                            f'{sid}: initial_charge_state={storage.initial_charge_state} '
+                            f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.'
+                        )
+                    if (storage.initial_charge_state < min_initial_at_max_capacity).any():
+                        errors.append(
+                            f'{sid}: initial_charge_state={storage.initial_charge_state} '
+                            f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.'
+                        )
+
+            # Balanced charging/discharging size compatibility (DataArray checks)
+            if storage.balanced:
+                charging_min = storage.charging.size.minimum_or_fixed_size
+                charging_max = storage.charging.size.maximum_or_fixed_size
+                discharging_min = storage.discharging.size.minimum_or_fixed_size
+                discharging_max = storage.discharging.size.maximum_or_fixed_size
+
+                if (charging_min > discharging_max).any() or (charging_max < discharging_min).any():
+                    errors.append(
+                        f'Balancing charging and discharging Flows in {sid} need compatible minimum and maximum sizes. '
+                        f'Got: charging.size.minimum={charging_min}, charging.size.maximum={charging_max} and '
+                        f'discharging.size.minimum={discharging_min}, discharging.size.maximum={discharging_max}.'
+                    )
+
+        if errors:
+            raise PlausibilityError('\n'.join(errors))
+
 
 class FlowsData:
     """Batched data container for all flows with indexed access.
@@ -1486,6 +1569,79 @@ def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None)
 
         return self._ensure_canonical_order(arr)
 
+    # === Validation ===
+
+    def _any_per_flow(self, arr: xr.DataArray) -> xr.DataArray:
+        """Reduce to (flow,) by collapsing all non-flow dims with .any()."""
+        non_flow_dims = [d for d in arr.dims if d != self.dim_name]
+        return arr.any(dim=non_flow_dims) if non_flow_dims else arr
+
+    def _flagged_ids(self, mask: xr.DataArray) -> list[str]:
+        """Return flow IDs where mask is True."""
+        return [fid for fid, flag in zip(self.ids, mask.values, strict=False) if flag]
+
+    def validate(self) -> None:
+        """Validate all flows (config + DataArray checks).
+
+        Performs both:
+        - Config validation via Flow.validate_config()
+        - DataArray validation (post-transformation checks)
+
+        Raises:
+            PlausibilityError: If any validation check fails.
+        """
+        if not self.elements:
+            return
+
+        for flow in self.elements.values():
+            flow.validate_config()
+
+        errors: list[str] = []
+
+        # Batched checks: relative_minimum <= relative_maximum
+        invalid_bounds = self._any_per_flow(self.relative_minimum > self.relative_maximum)
+        if invalid_bounds.any():
+            errors.append(f'relative_minimum > relative_maximum for flows: {self._flagged_ids(invalid_bounds)}')
+
+        # Check: size required when relative_minimum > 0
+        has_nonzero_min = self._any_per_flow(self.relative_minimum > 0)
+        if (has_nonzero_min & ~self.has_size).any():
+            errors.append(
+                f'relative_minimum > 0 but no size defined for flows: '
+                f'{self._flagged_ids(has_nonzero_min & ~self.has_size)}. '
+                f'A size is required because the lower bound is size * relative_minimum.'
+            )
+
+        # Check: size required when relative_maximum < 1
+        has_nondefault_max = self._any_per_flow(self.relative_maximum < 1)
+        if (has_nondefault_max & ~self.has_size).any():
+            errors.append(
+                f'relative_maximum < 1 but no size defined for flows: '
+                f'{self._flagged_ids(has_nondefault_max & ~self.has_size)}. '
+                f'A size is required because the upper bound is size * relative_maximum.'
+            )
+
+        # Warning: relative_minimum > 0 without status_parameters prevents switching inactive
+        has_nonzero_min_no_status = has_nonzero_min & ~self.has_status
+        if has_nonzero_min_no_status.any():
+            logger.warning(
+                f'Flows {self._flagged_ids(has_nonzero_min_no_status)} have relative_minimum > 0 '
+                f'and no status_parameters. This prevents the flow from switching inactive (flow_rate = 0). '
+                f'Consider using status_parameters to allow switching active and inactive.'
+            )
+
+        # Warning: status_parameters with relative_minimum=0 allows status=1 with flow=0
+        has_zero_min_with_status = ~has_nonzero_min & self.has_status
+        if has_zero_min_with_status.any():
+            logger.warning(
+                f'Flows {self._flagged_ids(has_zero_min_with_status)} have status_parameters but '
+                f'relative_minimum=0. This allows status=1 with flow=0, which may lead to unexpected '
+                f'behavior. Consider setting relative_minimum > 0 to ensure the unit produces when active.'
+            )
+
+        if errors:
+            raise PlausibilityError('\n'.join(errors))
+
 
 class EffectsData:
     """Batched data container for all effects.
@@ -1604,6 +1760,45 @@ def values(self):
         """Iterate over Effect objects."""
         return self._effects
 
+    def validate(self) -> None:
+        """Validate all effects and the effect collection structure.
+
+        Performs both:
+        - Individual effect config validation
+        - Collection-level validation (circular loops in share mappings, unknown effect refs)
+        """
+        for effect in self._effects:
+            effect.validate_config()
+
+        # Collection-level validation (share structure)
+        self._validate_share_structure()
+
+    def _validate_share_structure(self) -> None:
+        """Validate effect share mappings for cycles and unknown references."""
+        from .effects import detect_cycles, tuples_to_adjacency_list
+
+        temporal, periodic = self._collection.calculate_effect_share_factors()
+
+        # Validate all referenced effects exist
+        edges = list(temporal.keys()) + list(periodic.keys())
+        unknown_sources = {src for src, _ in edges if src not in self._collection}
+        unknown_targets = {tgt for _, tgt in edges if tgt not in self._collection}
+        unknown = unknown_sources | unknown_targets
+        if unknown:
+            raise KeyError(f'Unknown effects used in effect share mappings: {sorted(unknown)}')
+
+        # Check for circular dependencies
+        temporal_cycles = detect_cycles(tuples_to_adjacency_list([key for key in temporal]))
+        periodic_cycles = detect_cycles(tuples_to_adjacency_list([key for key in periodic]))
+
+        if temporal_cycles:
+            cycle_str = '\n'.join([' -> '.join(cycle) for cycle in temporal_cycles])
+            raise ValueError(f'Error: circular temporal-shares detected:\n{cycle_str}')
+
+        if periodic_cycles:
+            cycle_str = '\n'.join([' -> '.join(cycle) for cycle in periodic_cycles])
+            raise ValueError(f'Error: circular periodic-shares detected:\n{cycle_str}')
+
 
 class BusesData:
     """Batched data container for buses."""
@@ -1630,6 +1825,23 @@ def imbalance_elements(self) -> list[Bus]:
         """Bus objects that allow imbalance."""
         return [b for b in self._buses if b.allows_imbalance]
 
+    def validate(self) -> None:
+        """Validate all buses (config + DataArray checks).
+
+        Performs both:
+        - Config validation via Bus.validate_config()
+        - DataArray validation (post-transformation checks)
+        """
+        for bus in self._buses:
+            bus.validate_config()
+            # Warning: imbalance_penalty == 0 (DataArray check)
+            if bus.imbalance_penalty_per_flow_hour is not None:
+                zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0))
+                if zero_penalty:
+                    logger.warning(
+                        f'In Bus {bus.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.'
+                    )
+
 
 class ComponentsData:
     """Batched data container for components with status."""
@@ -1651,6 +1863,18 @@ def dim_name(self) -> str:
 
     def all_components(self) -> list[Component]:
         return self._all_components
 
+    def validate(self) -> None:
+        """Validate generic components (config checks only).
+
+        Note: Storage, Transmission, and LinearConverter are validated
+        through their specialized *Data classes, so we skip them here.
+ """ + from .components import LinearConverter, Storage, Transmission + + for component in self._all_components: + if not isinstance(component, (Storage, LinearConverter, Transmission)): + component.validate_config() + class ConvertersData: """Batched data container for converters.""" @@ -1677,6 +1901,11 @@ def with_piecewise(self) -> list[LinearConverter]: """Converters with piecewise_conversion.""" return [c for c in self._converters if c.piecewise_conversion] + def validate(self) -> None: + """Validate all converters (config checks, no DataArray operations needed).""" + for converter in self._converters: + converter.validate_config() + class TransmissionsData: """Batched data container for transmissions.""" @@ -1703,17 +1932,65 @@ def balanced(self) -> list[Transmission]: """Transmissions with balanced flow sizes.""" return [t for t in self._transmissions if t.balanced] + def validate(self) -> None: + """Validate all transmissions (config + DataArray checks). + + Performs both: + - Config validation via Transmission.validate_config() + - DataArray validation (post-transformation checks) + + Raises: + PlausibilityError: If any validation check fails. + """ + for transmission in self._transmissions: + transmission.validate_config() + + errors: list[str] = [] + + for transmission in self._transmissions: + tid = transmission.label_full + + # Balanced size compatibility (DataArray check) + if transmission.balanced: + in1_min = transmission.in1.size.minimum_or_fixed_size + in1_max = transmission.in1.size.maximum_or_fixed_size + in2_min = transmission.in2.size.minimum_or_fixed_size + in2_max = transmission.in2.size.maximum_or_fixed_size + + if (in1_min > in2_max).any() or (in1_max < in2_min).any(): + errors.append( + f'Balanced Transmission {tid} needs compatible minimum and maximum sizes. ' + f'Got: in1.size.minimum={in1_min}, in1.size.maximum={in1_max} and ' + f'in2.size.minimum={in2_min}, in2.size.maximum={in2_max}.' + ) + + if errors: + raise PlausibilityError('\n'.join(errors)) + class BatchedAccessor: """Accessor for batched data containers on FlowSystem. + Provides cached access to *Data containers for all element types. + The same cached instances are used for both validation (during connect_and_transform) + and model building, ensuring consistency and avoiding duplicate object creation. 
+
     Usage:
-        flow_system.batched.flows  # Access FlowsData
+        flow_system.batched.flows     # Access FlowsData
+        flow_system.batched.storages  # Access StoragesData
+        flow_system.batched.buses     # Access BusesData
     """
 
     def __init__(self, flow_system: FlowSystem):
         self._fs = flow_system
         self._flows: FlowsData | None = None
+        self._storages: StoragesData | None = None
+        self._intercluster_storages: StoragesData | None = None
+        self._buses: BusesData | None = None
+        self._effects: EffectsData | None = None
+        self._components: ComponentsData | None = None
+        self._converters: ConvertersData | None = None
+        self._transmissions: TransmissionsData | None = None
 
     @property
     def flows(self) -> FlowsData:
@@ -1723,6 +2000,93 @@ def flows(self) -> FlowsData:
             self._flows = FlowsData(all_flows, self._fs)
         return self._flows
 
+    @property
+    def storages(self) -> StoragesData:
+        """Get or create StoragesData for basic storages (excludes intercluster)."""
+        if self._storages is None:
+            from .components import Storage
+
+            clustering = self._fs.clustering
+            basic_storages = [
+                c
+                for c in self._fs.components.values()
+                if isinstance(c, Storage)
+                and not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic'))
+            ]
+            effect_ids = list(self._fs.effects.keys())
+            self._storages = StoragesData(
+                basic_storages, 'storage', effect_ids, timesteps_extra=self._fs.timesteps_extra
+            )
+        return self._storages
+
+    @property
+    def intercluster_storages(self) -> StoragesData:
+        """Get or create StoragesData for intercluster storages."""
+        if self._intercluster_storages is None:
+            from .components import Storage
+
+            clustering = self._fs.clustering
+            intercluster = [
+                c
+                for c in self._fs.components.values()
+                if isinstance(c, Storage)
+                and clustering is not None
+                and c.cluster_mode in ('intercluster', 'intercluster_cyclic')
+            ]
+            effect_ids = list(self._fs.effects.keys())
+            self._intercluster_storages = StoragesData(intercluster, 'intercluster_storage', effect_ids)
+        return self._intercluster_storages
+
+    @property
+    def buses(self) -> BusesData:
+        """Get or create BusesData for all buses."""
+        if self._buses is None:
+            self._buses = BusesData(list(self._fs.buses.values()))
+        return self._buses
+
+    @property
+    def effects(self) -> EffectsData:
+        """Get or create EffectsData for all effects."""
+        if self._effects is None:
+            self._effects = EffectsData(self._fs.effects)
+        return self._effects
+
+    @property
+    def components(self) -> ComponentsData:
+        """Get or create ComponentsData for all components."""
+        if self._components is None:
+            all_components = list(self._fs.components.values())
+            components_with_status = [c for c in all_components if c.status_parameters is not None]
+            self._components = ComponentsData(components_with_status, all_components)
+        return self._components
+
+    @property
+    def converters(self) -> ConvertersData:
+        """Get or create ConvertersData for all converters."""
+        if self._converters is None:
+            from .components import LinearConverter
+
+            converters = [c for c in self._fs.components.values() if isinstance(c, LinearConverter)]
+            self._converters = ConvertersData(converters)
+        return self._converters
+
+    @property
+    def transmissions(self) -> TransmissionsData:
+        """Get or create TransmissionsData for all transmissions."""
+        if self._transmissions is None:
+            from .components import Transmission
+
+            transmissions = [c for c in self._fs.components.values() if isinstance(c, Transmission)]
+            self._transmissions = TransmissionsData(transmissions)
+        return self._transmissions
+
     def _reset(self) -> None:
-        """Reset cached data (called when FlowSystem changes)."""
+        """Reset all cached data (called when FlowSystem is invalidated)."""
        self._flows = None
+        self._storages = None
+        self._intercluster_storages = None
+        self._buses = None
+        self._effects = None
+        self._components = None
+        self._converters = None
+        self._transmissions = None
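
Not part of the patch: a minimal sketch of how the batched accessors and the unified validation above are meant to be exercised. It assumes a populated `FlowSystem` named `fs`; the wrapper function name is hypothetical, while `connect_and_transform()`, `fs.batched`, and `flixopt.core.PlausibilityError` are taken from the hunks above.

```python
from flixopt.core import PlausibilityError

def validate_or_report(fs) -> bool:
    """Run flixopt's unified validation; return False instead of raising."""
    try:
        fs.connect_and_transform()   # internally calls fs._run_validation()
    except PlausibilityError as exc: # all failed checks, joined with newlines
        print(f'Validation failed:\n{exc}')
        return False
    return True
```

Because `_run_validation()` goes through `fs.batched`, the *Data containers built during validation are the same cached instances that `build_model()` consumes later.
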
diff --git a/flixopt/components.py b/flixopt/components.py
index f202128ae..9d759b2ce 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -17,7 +17,7 @@
 from .elements import Component, Flow
 from .features import MaskHelpers, stack_along_dim
 from .interface import InvestParameters, PiecewiseConversion, StatusParameters
-from .modeling import _scalar_safe_isel, _scalar_safe_reduce
+from .modeling import _scalar_safe_reduce
 from .structure import (
     FlowSystemModel,
     FlowVarName,
@@ -190,8 +190,13 @@ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
         if self.piecewise_conversion is not None:
             self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion'))
 
-    def _plausibility_checks(self) -> None:
-        super()._plausibility_checks()
+    def validate_config(self) -> None:
+        """Validate configuration consistency.
+
+        Called via the batched *Data.validate() methods in FlowSystem._run_validation().
+        These are simple checks that don't require DataArray operations.
+        """
+        super().validate_config()
         if not self.conversion_factors and not self.piecewise_conversion:
             raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!')
         if self.conversion_factors and self.piecewise_conversion:
@@ -220,6 +225,10 @@ def _plausibility_checks(self) -> None:
                 f'({flow.label_full}).'
             )
 
+    def _plausibility_checks(self) -> None:
+        """Legacy validation method - delegates to validate_config()."""
+        self.validate_config()
+
     def transform_data(self) -> None:
         super().transform_data()
         if self.conversion_factors:
@@ -495,31 +504,21 @@ def transform_data(self) -> None:
             f'{self.prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario']
         )
 
-    def _plausibility_checks(self) -> None:
-        """
-        Check for infeasible or uncommon combinations of parameters
+    def validate_config(self) -> None:
+        """Validate configuration consistency.
+
+        Called via the batched *Data.validate() methods in FlowSystem._run_validation().
+        These are simple checks that don't require DataArray operations.
         """
-        super()._plausibility_checks()
+        super().validate_config()
 
-        # Validate string values and set flag
-        initial_equals_final = False
+        # Validate string values for initial_charge_state
         if isinstance(self.initial_charge_state, str):
-            if not self.initial_charge_state == 'equals_final':
+            if self.initial_charge_state != 'equals_final':
                 raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}')
-            initial_equals_final = True
 
-        # Capacity is required when using non-default relative bounds
+        # Capacity is required for final charge state constraints (simple None checks)
         if self.capacity_in_flow_hours is None:
-            if np.any(self.relative_minimum_charge_state > 0):
-                raise PlausibilityError(
-                    f'Storage "{self.label_full}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. '
-                    f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.'
-                )
-            if np.any(self.relative_maximum_charge_state < 1):
-                raise PlausibilityError(
-                    f'Storage "{self.label_full}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. '
-                    f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.'
-                )
             if self.relative_minimum_final_charge_state is not None:
                 raise PlausibilityError(
                     f'Storage "{self.label_full}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
                     f'A capacity is required for relative final charge state constraints.'
                 )
@@ -531,39 +530,7 @@ def _plausibility_checks(self) -> None:
                     f'A capacity is required for relative final charge state constraints.'
                 )
 
-        # Skip capacity-related checks if capacity is None (unbounded)
-        if self.capacity_in_flow_hours is not None:
-            # Use new InvestParameters methods to get capacity bounds
-            if isinstance(self.capacity_in_flow_hours, InvestParameters):
-                minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size
-                maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size
-            else:
-                maximum_capacity = self.capacity_in_flow_hours
-                minimum_capacity = self.capacity_in_flow_hours
-
-            # Initial charge state should not constrain investment decision
-            # If initial > (min_cap * rel_max), investment is forced to increase capacity
-            # If initial < (max_cap * rel_min), investment is forced to decrease capacity
-            min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(
-                self.relative_minimum_charge_state, {'time': 0}
-            )
-            max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(
-                self.relative_maximum_charge_state, {'time': 0}
-            )
-
-            # Only perform numeric comparisons if using a numeric initial_charge_state
-            if not initial_equals_final and self.initial_charge_state is not None:
-                if (self.initial_charge_state > max_initial_at_min_capacity).any():
-                    raise PlausibilityError(
-                        f'{self.label_full}: {self.initial_charge_state=} '
-                        f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.'
-                    )
-                if (self.initial_charge_state < min_initial_at_max_capacity).any():
-                    raise PlausibilityError(
-                        f'{self.label_full}: {self.initial_charge_state=} '
-                        f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.'
-                    )
-
+        # Balanced requires InvestParameters on charging/discharging flows
         if self.balanced:
             if not isinstance(self.charging.size, InvestParameters) or not isinstance(
                 self.discharging.size, InvestParameters
@@ -572,14 +539,12 @@ def _plausibility_checks(self) -> None:
                     f'Balancing charging and discharging Flows in {self.label_full} is only possible with Investments.'
                 )
 
-            if (self.charging.size.minimum_or_fixed_size > self.discharging.size.maximum_or_fixed_size).any() or (
-                self.charging.size.maximum_or_fixed_size < self.discharging.size.minimum_or_fixed_size
-            ).any():
-                raise PlausibilityError(
-                    f'Balancing charging and discharging Flows in {self.label_full} need compatible minimum and maximum sizes.'
-                    f'Got: {self.charging.size.minimum_or_fixed_size=}, {self.charging.size.maximum_or_fixed_size=} and '
-                    f'{self.discharging.size.minimum_or_fixed_size=}, {self.discharging.size.maximum_or_fixed_size=}.'
-                )
+    def _plausibility_checks(self) -> None:
+        """Legacy validation method - delegates to validate_config().
+
+        DataArray-based checks moved to StoragesData.validate().
+        """
+        self.validate_config()
 
     def __repr__(self) -> str:
         """Return string representation."""
@@ -737,31 +702,38 @@ def __init__(
         self.absolute_losses = absolute_losses
         self.balanced = balanced
 
-    def _plausibility_checks(self):
-        super()._plausibility_checks()
-        # check buses:
+    def validate_config(self) -> None:
+        """Validate configuration consistency.
+
+        Called via the batched *Data.validate() methods in FlowSystem._run_validation().
+        These are simple checks that don't require DataArray operations.
+        """
+        super().validate_config()
+        # Check buses consistency
         if self.in2 is not None:
-            assert self.in2.bus == self.out1.bus, (
-                f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}'
-            )
+            if self.in2.bus != self.out1.bus:
+                raise ValueError(
+                    f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}'
+                )
         if self.out2 is not None:
-            assert self.out2.bus == self.in1.bus, (
-                f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}'
-            )
+            if self.out2.bus != self.in1.bus:
+                raise ValueError(
+                    f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}'
+                )
+
+        # Balanced requires InvestParameters on both in-Flows
         if self.balanced:
             if self.in2 is None:
                 raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
             if not isinstance(self.in1.size, InvestParameters) or not isinstance(self.in2.size, InvestParameters):
                 raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
 
-            if (self.in1.size.minimum_or_fixed_size > self.in2.size.maximum_or_fixed_size).any() or (
-                self.in1.size.maximum_or_fixed_size < self.in2.size.minimum_or_fixed_size
-            ).any():
-                raise ValueError(
-                    f'Balanced Transmission needs compatible minimum and maximum sizes.'
-                    f'Got: {self.in1.size.minimum_or_fixed_size=}, {self.in1.size.maximum_or_fixed_size=} and '
-                    f'{self.in2.size.minimum_or_fixed_size=}, {self.in2.size.maximum_or_fixed_size=}.'
-                )
+
+    def _plausibility_checks(self) -> None:
+        """Legacy validation method - delegates to validate_config().
+
+        DataArray-based checks moved to TransmissionsData.validate().
+        """
+        self.validate_config()
 
     def _propagate_status_parameters(self) -> None:
         super()._propagate_status_parameters()
diff --git a/flixopt/effects.py b/flixopt/effects.py
index db6aad855..579d0a94e 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -290,7 +290,12 @@ def transform_data(self) -> None:
             f'{self.prefix}|period_weights', self.period_weights, dims=['period', 'scenario']
         )
 
-    def _plausibility_checks(self) -> None:
+    def validate_config(self) -> None:
+        """Validate configuration consistency.
+
+        Called via the batched *Data.validate() methods in FlowSystem._run_validation().
+        These are simple checks that don't require DataArray operations.
+        """
         # Check that minimum_over_periods and maximum_over_periods require a period dimension
         if (
             self.minimum_over_periods is not None or self.maximum_over_periods is not None
@@ -301,6 +306,10 @@
                 f'the FlowSystem, or remove these constraints.'
             )
 
+    def _plausibility_checks(self) -> None:
+        """Legacy validation method - delegates to validate_config()."""
+        self.validate_config()
+
 
 class EffectsModel:
     """Type-level model for ALL effects with batched variables using 'effect' dimension.
@@ -813,28 +822,21 @@ def get_effect_label(eff: str | None) -> str:
             return {get_effect_label(effect): value for effect, value in effect_values_user.items()}
         return {self.standard_effect.label: effect_values_user}
 
+    def validate_config(self) -> None:
+        """Deprecated: Validation is now handled by EffectsData.validate().
+
+        This method is kept for backwards compatibility but does nothing.
+        Collection-level validation (cycles, unknown refs) is now in EffectsData._validate_share_structure().
+ """ + pass + def _plausibility_checks(self) -> None: - # Check circular loops in effects: - temporal, periodic = self.calculate_effect_share_factors() - - # Validate all referenced effects (both sources and targets) exist - edges = list(temporal.keys()) + list(periodic.keys()) - unknown_sources = {src for src, _ in edges if src not in self} - unknown_targets = {tgt for _, tgt in edges if tgt not in self} - unknown = unknown_sources | unknown_targets - if unknown: - raise KeyError(f'Unknown effects used in effect share mappings: {sorted(unknown)}') - - temporal_cycles = detect_cycles(tuples_to_adjacency_list([key for key in temporal])) - periodic_cycles = detect_cycles(tuples_to_adjacency_list([key for key in periodic])) - - if temporal_cycles: - cycle_str = '\n'.join([' -> '.join(cycle) for cycle in temporal_cycles]) - raise ValueError(f'Error: circular temporal-shares detected:\n{cycle_str}') - - if periodic_cycles: - cycle_str = '\n'.join([' -> '.join(cycle) for cycle in periodic_cycles]) - raise ValueError(f'Error: circular periodic-shares detected:\n{cycle_str}') + """Deprecated: Legacy validation method. + + Kept for backwards compatibility but does nothing. + Validation is now handled by EffectsData.validate(). + """ + pass def __getitem__(self, effect: str | Effect | None) -> Effect: """ diff --git a/flixopt/elements.py b/flixopt/elements.py index 1f5a8a99f..f8f7f257d 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -237,7 +237,12 @@ def _check_unique_flow_labels(self, inputs: list = None, outputs: list = None): duplicates = {label for label in all_flow_labels if all_flow_labels.count(label) > 1} raise ValueError(f'Flow names must be unique! "{self.label_full}" got 2 or more of: {duplicates}') - def _plausibility_checks(self) -> None: + def validate_config(self) -> None: + """Validate configuration consistency. + + Called BEFORE transformation via FlowSystem._run_config_validation(). + These are simple checks that don't require DataArray operations. + """ self._check_unique_flow_labels() # Component with status_parameters requires all flows to have sizes set @@ -251,9 +256,9 @@ def _plausibility_checks(self) -> None: f'(required for big-M constraints).' ) - # Run plausibility checks on all flows - for flow in self.flows.values(): - flow._plausibility_checks() + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config().""" + self.validate_config() def _connect_flows(self, inputs=None, outputs=None): if inputs is None: @@ -403,18 +408,24 @@ def transform_data(self) -> None: f'{self.prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour ) - def _plausibility_checks(self) -> None: - if self.imbalance_penalty_per_flow_hour is not None: - zero_penalty = np.all(np.equal(self.imbalance_penalty_per_flow_hour, 0)) - if zero_penalty: - logger.warning( - f'In Bus {self.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' - ) + def validate_config(self) -> None: + """Validate configuration consistency. + + Called BEFORE transformation via FlowSystem._run_config_validation(). + These are simple checks that don't require DataArray operations. + """ if len(self.inputs) == 0 and len(self.outputs) == 0: raise ValueError( f'Bus "{self.label_full}" has no Flows connected to it. Please remove it from the FlowSystem' ) + def _plausibility_checks(self) -> None: + """Legacy validation method - delegates to validate_config(). 
+
+        DataArray-based checks (imbalance_penalty warning) moved to BusesData.validate().
+        """
+        self.validate_config()
+
     @property
     def allows_imbalance(self) -> bool:
         return self.imbalance_penalty_per_flow_hour is not None
@@ -665,11 +676,12 @@ def transform_data(self) -> None:
         elif self.size is not None:
             self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario'])
 
-    def _plausibility_checks(self) -> None:
-        # TODO: Incorporate into Variable? (Lower_bound can not be greater than upper bound
-        if (self.relative_minimum > self.relative_maximum).any():
-            raise PlausibilityError(self.label_full + ': Take care, that relative_minimum <= relative_maximum!')
+    def validate_config(self) -> None:
+        """Validate configuration consistency.
+        Called via the batched *Data.validate() methods in FlowSystem._run_validation().
+        These are simple checks that don't require DataArray operations.
+        """
 
         # Size is required when using StatusParameters (for big-M constraints)
         if self.status_parameters is not None and self.size is None:
             raise PlausibilityError(
@@ -683,19 +695,6 @@
                 f'A size is required because flow_rate = size * fixed_relative_profile.'
             )
 
-        # Size is required when using non-default relative bounds (flow_rate = size * relative_bound)
-        if self.size is None and np.any(self.relative_minimum > 0):
-            raise PlausibilityError(
-                f'Flow "{self.label_full}" has relative_minimum > 0 but no size defined. '
-                f'A size is required because the lower bound is size * relative_minimum.'
-            )
-
-        if self.size is None and np.any(self.relative_maximum < 1):
-            raise PlausibilityError(
-                f'Flow "{self.label_full}" has relative_maximum != 1 but no size defined. '
-                f'A size is required because the upper bound is size * relative_maximum.'
-            )
-
         # Size is required for load factor constraints (total_flow_hours / size)
         if self.size is None and self.load_factor_min is not None:
             raise PlausibilityError(
@@ -709,28 +708,7 @@
                 f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
            )
 
-        if self.fixed_relative_profile is not None and self.status_parameters is not None:
-            logger.warning(
-                f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters.'
-                f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.'
-            )
-
-        # Warn when StatusParameters is used with relative_minimum=0, as the status variable
-        # becomes less meaningful - the unit can be "on" (status=1) while producing nothing (flow=0)
-        if self.status_parameters is not None and not np.any(self.relative_minimum > 0):
-            logger.warning(
-                f'Flow "{self.label_full}" has status_parameters but relative_minimum=0. '
-                f'This allows status=1 with flow=0, which may lead to unexpected behavior. '
-                f'Consider setting relative_minimum > 0 to ensure the unit produces when active.'
-            )
-
-        if np.any(self.relative_minimum > 0) and self.status_parameters is None:
-            logger.warning(
-                f'Flow {self.label_full} has a relative_minimum of {self.relative_minimum} and no status_parameters. '
-                f'This prevents the Flow from switching inactive (flow_rate = 0). '
-                f'Consider using status_parameters to allow the Flow to be switched active and inactive.'
-            )
-
+        # Validate previous_flow_rate type
         if self.previous_flow_rate is not None:
             if not any(
                 [
                 ]
             ):
                 raise TypeError(
-                    f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. Got {type(self.previous_flow_rate)}. '
+                    f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. '
+                    f'Got {type(self.previous_flow_rate)}. '
                     f'Different values in different periods or scenarios are not yet supported.'
                 )
 
+        # Warning: fixed_relative_profile + status_parameters is unusual
+        if self.fixed_relative_profile is not None and self.status_parameters is not None:
+            logger.warning(
+                f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters. '
+                f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.'
+            )
+
+    def _plausibility_checks(self) -> None:
+        """Legacy validation method - delegates to validate_config().
+
+        DataArray-based validation is now done in FlowsData.validate().
+        """
+        self.validate_config()
+
     @property
     def label_full(self) -> str:
         return f'{self.component}({self.label})'
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index 54e76d333..73294f5e7 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -228,7 +228,7 @@ def __init__(
         self._solution: xr.Dataset | None = None
 
         # Aggregation info - populated by transform.cluster()
-        self.clustering: Clustering | None = None
+        self._clustering: Clustering | None = None
 
         # Statistics accessor cache - lazily initialized, invalidated on new solution
         self._statistics: StatisticsAccessor | None = None
@@ -743,9 +743,11 @@ def connect_and_transform(self):
         for element in chain(self.components.values(), self.effects.values(), self.buses.values()):
             element.transform_data()
 
-        # Validate cross-element references immediately after transformation
+        # Validate cross-element references after transformation
         self._validate_system_integrity()
-        self._run_plausibility_checks()
+
+        # Unified validation AFTER transformation (config + DataArray checks)
+        self._run_validation()
 
         self._connected_and_transformed = True
 
@@ -1391,6 +1393,32 @@ def batched(self) -> BatchedAccessor:
             self._batched = BatchedAccessor(self)
         return self._batched
 
+    @property
+    def clustering(self) -> Clustering | None:
+        """Clustering metadata for this FlowSystem.
+
+        This property is populated by `transform.cluster()` or when loading
+        a clustered FlowSystem from file. It contains information about the
+        original timesteps, cluster assignments, and aggregation metrics.
+
+        Setting this property resets the batched accessor cache to ensure
+        storage categorization (basic vs intercluster) is correctly computed
+        based on the new clustering state.
+
+        Returns:
+            Clustering object if this is a clustered FlowSystem, None otherwise.
+ """ + return self._clustering + + @clustering.setter + def clustering(self, value: Clustering | None) -> None: + """Set clustering and reset batched accessor cache.""" + self._clustering = value + # Reset batched accessor so storage categorization is recomputed + # with the new clustering state (basic vs intercluster storages) + if self._batched is not None: + self._batched._reset() + def plot_network( self, path: bool | str | pathlib.Path = 'flow_system.html', @@ -1485,20 +1513,36 @@ def _check_if_element_already_assigned(self, element: Element) -> None: ) def _prepare_effects(self) -> None: - """Validate effect collection and create the penalty effect if needed. + """Create the penalty effect if needed. Called before transform_data() so the penalty effect gets transformed. + Validation is done after transformation via _run_validation(). """ - self.effects._plausibility_checks() if self.effects._penalty_effect is None: penalty = self.effects._create_penalty_effect() if penalty._flow_system is None: penalty.link_to_flow_system(self) - def _run_plausibility_checks(self) -> None: - """Run plausibility checks on all elements after data transformation.""" - for element in chain(self.components.values(), self.effects.values(), self.buses.values()): - element._plausibility_checks() + def _run_validation(self) -> None: + """Run all validation through batched *Data classes. + + Each *Data.validate() method handles both: + - Config validation (simple checks) + - DataArray validation (post-transformation checks) + + Called after transform_data(). The cached *Data instances are + reused during model building. + """ + batched = self.batched + # Validate buses first - catches "Bus with no flows" before FlowsData fails on empty arrays + batched.buses.validate() + batched.effects.validate() + batched.flows.validate() + batched.storages.validate() + batched.intercluster_storages.validate() + batched.converters.validate() + batched.transmissions.validate() + batched.components.validate() def _validate_system_integrity(self) -> None: """ diff --git a/flixopt/structure.py b/flixopt/structure.py index f01566569..db6b45dff 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -180,15 +180,15 @@ class _FlowConstraint: UPTIME_UB = f'{FlowVarName.UPTIME}|ub' UPTIME_FORWARD = f'{FlowVarName.UPTIME}|forward' UPTIME_BACKWARD = f'{FlowVarName.UPTIME}|backward' - UPTIME_INITIAL_UB = f'{FlowVarName.UPTIME}|initial_ub' - UPTIME_INITIAL_LB = f'{FlowVarName.UPTIME}|initial_lb' + UPTIME_INITIAL = f'{FlowVarName.UPTIME}|initial' + UPTIME_INITIAL_CONTINUATION = f'{FlowVarName.UPTIME}|initial_continuation' # Downtime tracking constraints (built from variable name) DOWNTIME_UB = f'{FlowVarName.DOWNTIME}|ub' DOWNTIME_FORWARD = f'{FlowVarName.DOWNTIME}|forward' DOWNTIME_BACKWARD = f'{FlowVarName.DOWNTIME}|backward' - DOWNTIME_INITIAL_UB = f'{FlowVarName.DOWNTIME}|initial_ub' - DOWNTIME_INITIAL_LB = f'{FlowVarName.DOWNTIME}|initial_lb' + DOWNTIME_INITIAL = f'{FlowVarName.DOWNTIME}|initial' + DOWNTIME_INITIAL_CONTINUATION = f'{FlowVarName.DOWNTIME}|initial_continuation' FlowVarName.Constraint = _FlowConstraint @@ -233,15 +233,15 @@ class _ComponentConstraint: UPTIME_UB = f'{ComponentVarName.UPTIME}|ub' UPTIME_FORWARD = f'{ComponentVarName.UPTIME}|forward' UPTIME_BACKWARD = f'{ComponentVarName.UPTIME}|backward' - UPTIME_INITIAL_UB = f'{ComponentVarName.UPTIME}|initial_ub' - UPTIME_INITIAL_LB = f'{ComponentVarName.UPTIME}|initial_lb' + UPTIME_INITIAL = f'{ComponentVarName.UPTIME}|initial' + UPTIME_INITIAL_CONTINUATION = 
diff --git a/flixopt/structure.py b/flixopt/structure.py
index f01566569..db6b45dff 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -180,15 +180,15 @@ class _FlowConstraint:
     UPTIME_UB = f'{FlowVarName.UPTIME}|ub'
     UPTIME_FORWARD = f'{FlowVarName.UPTIME}|forward'
     UPTIME_BACKWARD = f'{FlowVarName.UPTIME}|backward'
-    UPTIME_INITIAL_UB = f'{FlowVarName.UPTIME}|initial_ub'
-    UPTIME_INITIAL_LB = f'{FlowVarName.UPTIME}|initial_lb'
+    UPTIME_INITIAL = f'{FlowVarName.UPTIME}|initial'
+    UPTIME_INITIAL_CONTINUATION = f'{FlowVarName.UPTIME}|initial_continuation'
 
     # Downtime tracking constraints (built from variable name)
     DOWNTIME_UB = f'{FlowVarName.DOWNTIME}|ub'
     DOWNTIME_FORWARD = f'{FlowVarName.DOWNTIME}|forward'
     DOWNTIME_BACKWARD = f'{FlowVarName.DOWNTIME}|backward'
-    DOWNTIME_INITIAL_UB = f'{FlowVarName.DOWNTIME}|initial_ub'
-    DOWNTIME_INITIAL_LB = f'{FlowVarName.DOWNTIME}|initial_lb'
+    DOWNTIME_INITIAL = f'{FlowVarName.DOWNTIME}|initial'
+    DOWNTIME_INITIAL_CONTINUATION = f'{FlowVarName.DOWNTIME}|initial_continuation'
 
 
 FlowVarName.Constraint = _FlowConstraint
@@ -233,15 +233,15 @@ class _ComponentConstraint:
     UPTIME_UB = f'{ComponentVarName.UPTIME}|ub'
     UPTIME_FORWARD = f'{ComponentVarName.UPTIME}|forward'
     UPTIME_BACKWARD = f'{ComponentVarName.UPTIME}|backward'
-    UPTIME_INITIAL_UB = f'{ComponentVarName.UPTIME}|initial_ub'
-    UPTIME_INITIAL_LB = f'{ComponentVarName.UPTIME}|initial_lb'
+    UPTIME_INITIAL = f'{ComponentVarName.UPTIME}|initial'
+    UPTIME_INITIAL_CONTINUATION = f'{ComponentVarName.UPTIME}|initial_continuation'
 
     # Downtime tracking constraints
     DOWNTIME_UB = f'{ComponentVarName.DOWNTIME}|ub'
     DOWNTIME_FORWARD = f'{ComponentVarName.DOWNTIME}|forward'
     DOWNTIME_BACKWARD = f'{ComponentVarName.DOWNTIME}|backward'
-    DOWNTIME_INITIAL_UB = f'{ComponentVarName.DOWNTIME}|initial_ub'
-    DOWNTIME_INITIAL_LB = f'{ComponentVarName.DOWNTIME}|initial_lb'
+    DOWNTIME_INITIAL = f'{ComponentVarName.DOWNTIME}|initial'
+    DOWNTIME_INITIAL_CONTINUATION = f'{ComponentVarName.DOWNTIME}|initial_continuation'
 
 
 ComponentVarName.Constraint = _ComponentConstraint
@@ -893,15 +893,7 @@ def build_model(self, timing: bool = False):
         Args:
             timing: If True, print detailed timing breakdown.
         """
-        from .batched import (
-            BusesData,
-            ComponentsData,
-            ConvertersData,
-            EffectsData,
-            StoragesData,
-            TransmissionsData,
-        )
-        from .components import InterclusterStoragesModel, LinearConverter, Storage, StoragesModel, Transmission
+        from .components import InterclusterStoragesModel, StoragesModel
         from .effects import EffectsModel
         from .elements import (
             BusesModel,
@@ -913,65 +905,40 @@
         timer = _BuildTimer() if timing else None
 
-        self.effects = EffectsModel(self, EffectsData(self.flow_system.effects))
+        # Use cached *Data from BatchedAccessor (same instances used for validation)
+        batched = self.flow_system.batched
+
+        self.effects = EffectsModel(self, batched.effects)
         if timer:
             timer.record('effects')
 
-        self._flows_model = FlowsModel(self, self.flow_system.batched.flows)
+        self._flows_model = FlowsModel(self, batched.flows)
         if timer:
             timer.record('flows')
 
-        self._buses_model = BusesModel(self, BusesData(list(self.flow_system.buses.values())), self._flows_model)
+        self._buses_model = BusesModel(self, batched.buses, self._flows_model)
         if timer:
             timer.record('buses')
 
-        all_components = list(self.flow_system.components.values())
-        effect_ids = list(self.flow_system.effects.keys())
-        clustering = self.flow_system.clustering
-
-        basic_storages = [
-            c
-            for c in all_components
-            if isinstance(c, Storage)
-            and not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic'))
-        ]
-        self._storages_model = StoragesModel(
-            self,
-            StoragesData(basic_storages, 'storage', effect_ids, timesteps_extra=self.flow_system.timesteps_extra),
-            self._flows_model,
-        )
+        self._storages_model = StoragesModel(self, batched.storages, self._flows_model)
         if timer:
             timer.record('storages')
 
-        intercluster_storages = [
-            c
-            for c in all_components
-            if isinstance(c, Storage)
-            and clustering is not None
-            and c.cluster_mode in ('intercluster', 'intercluster_cyclic')
-        ]
         self._intercluster_storages_model = InterclusterStoragesModel(
-            self,
-            StoragesData(intercluster_storages, 'intercluster_storage', effect_ids),
-            self._flows_model,
+            self, batched.intercluster_storages, self._flows_model
         )
         if timer:
             timer.record('intercluster_storages')
 
-        components_with_status = [c for c in all_components if c.status_parameters is not None]
-        self._components_model = ComponentsModel(
-            self, ComponentsData(components_with_status, all_components), self._flows_model
-        )
+        self._components_model = ComponentsModel(self, batched.components, self._flows_model)
         if timer:
             timer.record('components')
 
-        converters = [c for c in all_components if isinstance(c, LinearConverter)]
-        self._converters_model = ConvertersModel(self, ConvertersData(converters), self._flows_model)
+        self._converters_model = ConvertersModel(self, batched.converters, self._flows_model)
         if timer:
             timer.record('converters')
 
-        transmissions = [c for c in all_components if isinstance(c, Transmission)]
-        self._transmissions_model = TransmissionsModel(self, TransmissionsData(transmissions), self._flows_model)
+        self._transmissions_model = TransmissionsModel(self, batched.transmissions, self._flows_model)
         if timer:
             timer.record('transmissions')
diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py
index ab1da4d29..0bc8b089d 100644
--- a/flixopt/transform_accessor.py
+++ b/flixopt/transform_accessor.py
@@ -374,6 +374,8 @@ def build(self, ds: xr.Dataset) -> FlowSystem:
                 storage.initial_charge_state = None
 
         # Create Clustering object with full AggregationResult access
+        # Note: The clustering setter automatically resets the batched accessor
+        # to ensure storage categorization (basic vs intercluster) is recomputed.
         reduced_fs.clustering = Clustering(
             original_timesteps=self._fs.timesteps,
             original_data=drop_constant_arrays(ds, dim='time'),
diff --git a/tests/test_component.py b/tests/test_component.py
index 12a726a94..77b2cd471 100644
--- a/tests/test_component.py
+++ b/tests/test_component.py
@@ -274,12 +274,8 @@ def test_previous_states_with_multiple_flows_parameterized(
         x is not None for x in [in1_previous_flow_rate, out1_previous_flow_rate, out2_previous_flow_rate]
     )
     if has_previous:
-        # Check that uptime initial constraints exist in the model (batched naming)
-        # Note: component uptime constraints use |initial_lb and |initial_ub naming
-        has_uptime_constraint = (
-            'component|uptime|initial_lb' in model.constraints or 'component|uptime|initial_ub' in model.constraints
-        )
-        assert has_uptime_constraint, 'Uptime initial constraint should exist'
+        # Check that uptime initial constraint exists in the model (batched naming)
+        assert 'component|uptime|initial' in model.constraints, 'Uptime initial constraint should exist'
     else:
         # When no previous flow rate, no uptime initialization needed
         pass
diff --git a/tests/test_flow.py b/tests/test_flow.py
index 2feabf39e..c5a91bcca 100644
--- a/tests/test_flow.py
+++ b/tests/test_flow.py
@@ -502,7 +502,7 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf
         assert 'flow|uptime|ub' in model.constraints
         assert 'flow|uptime|forward' in model.constraints
         assert 'flow|uptime|backward' in model.constraints
-        assert 'flow|uptime|initial_ub' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
         # Get individual flow variables
         uptime = model.variables['flow|uptime'].sel(flow=flow_label, drop=True)
@@ -532,9 +532,10 @@
             + (status.isel(time=slice(1, None)) - 1) * mega,
         )
 
+        # duration[0] = (previous_duration + dt[0]) * state[0]; previous_uptime=0
         assert_conequal(
-            model.constraints['flow|uptime|initial_ub'].sel(flow=flow_label, drop=True),
-            uptime.isel(time=0) <= status.isel(time=0) * model.timestep_duration.isel(time=0),
+            model.constraints['flow|uptime|initial'].sel(flow=flow_label, drop=True),
+            uptime.isel(time=0) == status.isel(time=0) * model.timestep_duration.isel(time=0),
         )
 
     def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
@@ -564,7 +565,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, co
         assert 'flow|uptime|ub' in model.constraints
         assert 'flow|uptime|forward' in model.constraints
         assert 'flow|uptime|backward' in model.constraints
-        assert 'flow|uptime|initial_lb' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
         # Get individual flow variables
         uptime = model.variables['flow|uptime'].sel(flow=flow_label, drop=True)
@@ -595,7 +596,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, co
         )
 
         # Check that initial constraint exists (with previous uptime incorporated)
-        assert 'flow|uptime|initial_lb' in model.constraints
+        assert 'flow|uptime|initial' in model.constraints
 
     def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_config):
         """Test flow with minimum and maximum consecutive inactive hours."""
@@ -624,7 +625,7 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_con
         assert 'flow|downtime|ub' in model.constraints
         assert 'flow|downtime|forward' in model.constraints
         assert 'flow|downtime|backward' in model.constraints
-        assert 'flow|downtime|initial_ub' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
         # Get individual flow variables
         downtime = model.variables['flow|downtime'].sel(flow=flow_label, drop=True)
@@ -656,9 +657,10 @@
             + (inactive.isel(time=slice(1, None)) - 1) * mega,
         )
 
+        # duration[0] = (previous_duration + dt[0]) * state[0]; previous_downtime=1h
         assert_conequal(
-            model.constraints['flow|downtime|initial_ub'].sel(flow=flow_label, drop=True),
-            downtime.isel(time=0) <= inactive.isel(time=0) * (model.timestep_duration.isel(time=0) * (1 + 1)),
+            model.constraints['flow|downtime|initial'].sel(flow=flow_label, drop=True),
+            downtime.isel(time=0) == inactive.isel(time=0) * (model.timestep_duration.isel(time=0) * (1 + 1)),
         )
 
     def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
@@ -688,7 +690,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, c
         assert 'flow|downtime|ub' in model.constraints
         assert 'flow|downtime|forward' in model.constraints
         assert 'flow|downtime|backward' in model.constraints
-        assert 'flow|downtime|initial_lb' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
         # Get individual flow variables
         downtime = model.variables['flow|downtime'].sel(flow=flow_label, drop=True)
@@ -719,7 +721,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, c
         )
 
         # Check that initial constraint exists (with previous downtime incorporated)
-        assert 'flow|downtime|initial_lb' in model.constraints
+        assert 'flow|downtime|initial' in model.constraints
 
     def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_config):
         """Test flow with constraints on the number of startups."""
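
For reference, not part of the patch: the tests above now assert a single `|initial` equality, duration[0] = (previous_duration + dt[0]) * state[0], replacing the old `initial_lb`/`initial_ub` constraint pair. A tiny standalone check of that recurrence, independent of flixopt and linopy (the helper name is illustrative only):

```python
def initial_duration(previous_duration: float, dt0: float, state0: int) -> float:
    # duration[0] = (previous_duration + dt[0]) * state[0]
    return (previous_duration + dt0) * state0

# No previous uptime (test_consecutive_on_hours): duration restarts at dt[0].
assert initial_duration(0.0, 1.0, 1) == 1.0
# 1 h of previous downtime (test_consecutive_off_hours): dt[0] * (1 + 1).
assert initial_duration(1.0, 1.0, 1) == 2.0
# Inactive at t=0 forces duration[0] = 0 regardless of history.
assert initial_duration(5.0, 1.0, 0) == 0.0
```
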